Merge pull request #15539 from jpountz/fix/immutable_document_mapper

Make mapping updates more robust.
Adrien Grand 2015-12-23 09:55:42 +01:00
commit a2072fe927
47 changed files with 694 additions and 665 deletions


@@ -259,9 +259,8 @@ public class MetaDataMappingService extends AbstractComponent {
         } else {
             newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, existingMapper == null);
             if (existingMapper != null) {
-                // first, simulate
-                // this will just throw exceptions in case of problems
-                existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes());
+                // first, simulate: just call merge and ignore the result
+                existingMapper.merge(newMapper.mapping(), request.updateAllTypes());
             } else {
                 // TODO: can we find a better place for this validation?
                 // The reason this validation is here is that the mapper service doesn't learn about
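Because the new merge is side-effect free (see DocumentMapper below), a dry run is literally just calling it and discarding the result: conflicts surface as exceptions, and nothing was mutated in the meantime. A minimal sketch of that contract, using a hypothetical ImmutableMapping type rather than the actual Elasticsearch classes:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical stand-in for the merge contract: never mutate, return a new object.
    final class ImmutableMapping {
        private final Map<String, String> fields;

        ImmutableMapping(Map<String, String> fields) {
            this.fields = Collections.unmodifiableMap(new HashMap<>(fields));
        }

        // Throws on conflict, otherwise returns a merged copy; `this` is untouched.
        ImmutableMapping merge(ImmutableMapping other) {
            Map<String, String> merged = new HashMap<>(fields);
            for (Map.Entry<String, String> e : other.fields.entrySet()) {
                String previous = merged.put(e.getKey(), e.getValue());
                if (previous != null && previous.equals(e.getValue()) == false) {
                    throw new IllegalArgumentException("conflicting definition for [" + e.getKey() + "]");
                }
            }
            return new ImmutableMapping(merged);
        }
    }

Under this contract, existing.merge(incoming) is itself the simulation: if it returns, the update is legal.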


@@ -23,36 +23,24 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
-import java.util.AbstractMap;
 import java.util.Map;
-import java.util.stream.Stream;
 /**
  *
  */
 public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
-    private final CopyOnWriteHashMap<String, Analyzer> analyzers;
-    private final Analyzer defaultAnalyzer;
+    private final Map<String, Analyzer> analyzers;
-    public FieldNameAnalyzer(Analyzer defaultAnalyzer) {
-        this(new CopyOnWriteHashMap<>(), defaultAnalyzer);
-    }
-    public FieldNameAnalyzer(Map<String, Analyzer> analyzers, Analyzer defaultAnalyzer) {
+    public FieldNameAnalyzer(Map<String, Analyzer> analyzers) {
         super(Analyzer.PER_FIELD_REUSE_STRATEGY);
         this.analyzers = CopyOnWriteHashMap.copyOf(analyzers);
-        this.defaultAnalyzer = defaultAnalyzer;
     }
     public Map<String, Analyzer> analyzers() {
         return analyzers;
     }
-    public Analyzer defaultAnalyzer() {
-        return defaultAnalyzer;
-    }
     @Override
     protected Analyzer getWrappedAnalyzer(String fieldName) {
         Analyzer analyzer = analyzers.get(fieldName);
@@ -63,18 +51,4 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
         // Fields need to be explicitly added
         throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer");
     }
-    /**
-     * Return a new instance that contains the union of this and of the provided analyzers.
-     */
-    public FieldNameAnalyzer copyAndAddAll(Stream<? extends Map.Entry<String, Analyzer>> mappers) {
-        CopyOnWriteHashMap<String, Analyzer> result = analyzers.copyAndPutAll(mappers.map((e) -> {
-            if (e.getValue() == null) {
-                return new AbstractMap.SimpleImmutableEntry<>(e.getKey(), defaultAnalyzer);
-            }
-            return e;
-        }));
-        return new FieldNameAnalyzer(result, defaultAnalyzer);
-    }
 }


@@ -20,15 +20,15 @@
 package org.elasticsearch.index.mapper;
 import org.apache.lucene.analysis.Analyzer;
-import org.elasticsearch.common.collect.CopyOnWriteHashMap;
 import org.elasticsearch.common.regex.Regex;
-import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.analysis.FieldNameAnalyzer;
-import java.util.AbstractMap;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.Map;
 import java.util.Set;
 /**
@@ -37,44 +37,38 @@ import java.util.Set;
 public final class DocumentFieldMappers implements Iterable<FieldMapper> {
     /** Full field name to mapper */
-    private final CopyOnWriteHashMap<String, FieldMapper> fieldMappers;
+    private final Map<String, FieldMapper> fieldMappers;
     private final FieldNameAnalyzer indexAnalyzer;
     private final FieldNameAnalyzer searchAnalyzer;
     private final FieldNameAnalyzer searchQuoteAnalyzer;
-    public DocumentFieldMappers(AnalysisService analysisService) {
-        this(new CopyOnWriteHashMap<String, FieldMapper>(),
-                new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
-                new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
-                new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
-    }
-    private DocumentFieldMappers(CopyOnWriteHashMap<String, FieldMapper> fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
-        this.fieldMappers = fieldMappers;
-        this.indexAnalyzer = indexAnalyzer;
-        this.searchAnalyzer = searchAnalyzer;
-        this.searchQuoteAnalyzer = searchQuoteAnalyzer;
-    }
-    public DocumentFieldMappers copyAndAllAll(Collection<FieldMapper> newMappers) {
-        CopyOnWriteHashMap<String, FieldMapper> map = this.fieldMappers;
-        for (FieldMapper fieldMapper : newMappers) {
-            map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper);
-        }
-        FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
-            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer())
-        ));
-        FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
-            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer())
-        ));
-        FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
-            new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer) input.fieldType().searchQuoteAnalyzer())
-        ));
-        return new DocumentFieldMappers(map, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
-    }
+    private static void put(Map<String, Analyzer> analyzers, String key, Analyzer value, Analyzer defaultValue) {
+        if (value == null) {
+            value = defaultValue;
+        }
+        analyzers.put(key, value);
+    }
+    public DocumentFieldMappers(Collection<FieldMapper> mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) {
+        Map<String, FieldMapper> fieldMappers = new HashMap<>();
+        Map<String, Analyzer> indexAnalyzers = new HashMap<>();
+        Map<String, Analyzer> searchAnalyzers = new HashMap<>();
+        Map<String, Analyzer> searchQuoteAnalyzers = new HashMap<>();
+        for (FieldMapper mapper : mappers) {
+            fieldMappers.put(mapper.name(), mapper);
+            MappedFieldType fieldType = mapper.fieldType();
+            put(indexAnalyzers, fieldType.names().indexName(), fieldType.indexAnalyzer(), defaultIndex);
+            put(searchAnalyzers, fieldType.names().indexName(), fieldType.searchAnalyzer(), defaultSearch);
+            put(searchQuoteAnalyzers, fieldType.names().indexName(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote);
+        }
+        this.fieldMappers = Collections.unmodifiableMap(fieldMappers);
+        this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers);
+        this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers);
+        this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers);
+    }
     /** Returns the mapper for the given field */
     public FieldMapper getMapper(String field) {
         return fieldMappers.get(field);
     }
@@ -112,14 +106,6 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
         return this.indexAnalyzer;
     }
-    /**
-     * A smart analyzer used for indexing that takes into account specific analyzers configured
-     * per {@link FieldMapper} with a custom default analyzer for no explicit field analyzer.
-     */
-    public Analyzer indexAnalyzer(Analyzer defaultAnalyzer) {
-        return new FieldNameAnalyzer(indexAnalyzer.analyzers(), defaultAnalyzer);
-    }
     /**
      * A smart analyzer used for searching that takes into account specific analyzers configured
      * per {@link FieldMapper}.
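Note that default-analyzer handling moved from lookup time (the old FieldNameAnalyzer fallback and the removed indexAnalyzer(Analyzer) wrapper) to construction time: the put helper substitutes the default once, while the immutable maps are built. The same idea in isolation (illustrative generic helper, not ES code):

    import java.util.Map;

    // Resolve the default eagerly while building the map, so readers never
    // have to know that defaults exist.
    static <K, V> void putOrDefault(Map<K, V> map, K key, V value, V defaultValue) {
        map.put(key, value == null ? defaultValue : value);
    }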


@@ -24,16 +24,15 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.elasticsearch.ElasticsearchGenerationException;
-import org.elasticsearch.Version;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.mapper.internal.IdFieldMapper;
@@ -51,15 +50,12 @@ import org.elasticsearch.search.internal.SearchContext;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 import static java.util.Collections.emptyMap;
@@ -72,16 +68,14 @@ public class DocumentMapper implements ToXContent {
         private Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
-        private final Settings indexSettings;
         private final RootObjectMapper rootObjectMapper;
         private Map<String, Object> meta = emptyMap();
         private final Mapper.BuilderContext builderContext;
-        public Builder(Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) {
-            this.indexSettings = indexSettings;
+        public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
+            final Settings indexSettings = mapperService.getIndexSettings().getSettings();
             this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
             this.rootObjectMapper = builder.build(builderContext);
@@ -104,9 +98,14 @@ public class DocumentMapper implements ToXContent {
             return this;
         }
-        public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) {
+        public DocumentMapper build(MapperService mapperService) {
             Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
-            return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, metadataMappers, mapperService.mappingLock);
+            Mapping mapping = new Mapping(
+                    mapperService.getIndexSettings().getIndexVersionCreated(),
+                    rootObjectMapper,
+                    metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
+                    meta);
+            return new DocumentMapper(mapperService, mapping);
         }
     }
@@ -115,38 +114,25 @@ public class DocumentMapper implements ToXContent {
     private final String type;
     private final Text typeText;
-    private volatile CompressedXContent mappingSource;
-    private volatile Mapping mapping;
+    private final CompressedXContent mappingSource;
+    private final Mapping mapping;
     private final DocumentParser documentParser;
-    private volatile DocumentFieldMappers fieldMappers;
-    private volatile Map<String, ObjectMapper> objectMappers = Collections.emptyMap();
-    private boolean hasNestedObjects = false;
-    private final ReleasableLock mappingWriteLock;
-    private final ReentrantReadWriteLock mappingLock;
-    public DocumentMapper(MapperService mapperService, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser,
-                          RootObjectMapper rootObjectMapper,
-                          Map<String, Object> meta,
-                          Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers,
-                          ReentrantReadWriteLock mappingLock) {
+    private final DocumentFieldMappers fieldMappers;
+    private final Map<String, ObjectMapper> objectMappers;
+    private final boolean hasNestedObjects;
+    public DocumentMapper(MapperService mapperService, Mapping mapping) {
         this.mapperService = mapperService;
-        this.type = rootObjectMapper.name();
+        this.type = mapping.root().name();
         this.typeText = new Text(this.type);
-        this.mapping = new Mapping(
-                Version.indexCreated(indexSettings),
-                rootObjectMapper,
-                metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
-                meta);
-        this.documentParser = new DocumentParser(indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock()));
-        this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
-        this.mappingLock = mappingLock;
+        final IndexSettings indexSettings = mapperService.getIndexSettings();
+        this.mapping = mapping;
+        this.documentParser = new DocumentParser(indexSettings, mapperService.documentMapperParser(), this);
         if (metadataMapper(ParentFieldMapper.class).active()) {
             // mark the routing field mapper as required
@@ -163,7 +149,11 @@ public class DocumentMapper implements ToXContent {
         }
         MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);
-        this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(newFieldMappers);
+        final AnalysisService analysisService = mapperService.analysisService();
+        this.fieldMappers = new DocumentFieldMappers(newFieldMappers,
+                analysisService.defaultIndexAnalyzer(),
+                analysisService.defaultSearchAnalyzer(),
+                analysisService.defaultSearchQuoteAnalyzer());
         Map<String, ObjectMapper> builder = new HashMap<>();
         for (ObjectMapper objectMapper : newObjectMappers) {
@@ -173,14 +163,20 @@ public class DocumentMapper implements ToXContent {
             }
         }
+        boolean hasNestedObjects = false;
         this.objectMappers = Collections.unmodifiableMap(builder);
         for (ObjectMapper objectMapper : newObjectMappers) {
             if (objectMapper.nested().isNested()) {
                 hasNestedObjects = true;
             }
         }
+        this.hasNestedObjects = hasNestedObjects;
-        refreshSource();
+        try {
+            mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
+        } catch (Exception e) {
+            throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
+        }
     }
     public Mapping mapping() {
@@ -334,46 +330,17 @@ public class DocumentMapper implements ToXContent {
         return mapperService.getParentTypes().contains(type);
     }
-    private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
-        assert mappingLock.isWriteLockedByCurrentThread();
-        // update mappers for this document type
-        Map<String, ObjectMapper> builder = new HashMap<>(this.objectMappers);
-        for (ObjectMapper objectMapper : objectMappers) {
-            builder.put(objectMapper.fullPath(), objectMapper);
-            if (objectMapper.nested().isNested()) {
-                hasNestedObjects = true;
-            }
-        }
-        this.objectMappers = Collections.unmodifiableMap(builder);
-        this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
-        // finally update for the entire index
-        mapperService.addMappers(type, objectMappers, fieldMappers);
+    public DocumentMapper merge(Mapping mapping, boolean updateAllTypes) {
+        Mapping merged = this.mapping.merge(mapping, updateAllTypes);
+        return new DocumentMapper(mapperService, merged);
     }
-    public void merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
-        try (ReleasableLock lock = mappingWriteLock.acquire()) {
-            mapperService.checkMappersCompatibility(type, mapping, updateAllTypes);
-            // do the merge even if simulate == false so that we get exceptions
-            Mapping merged = this.mapping.merge(mapping, updateAllTypes);
-            if (simulate == false) {
-                this.mapping = merged;
-                Collection<ObjectMapper> objectMappers = new ArrayList<>();
-                Collection<FieldMapper> fieldMappers = new ArrayList<>(Arrays.asList(merged.metadataMappers));
-                MapperUtils.collect(merged.root, objectMappers, fieldMappers);
-                addMappers(objectMappers, fieldMappers, updateAllTypes);
-                refreshSource();
-            }
-        }
-    }
-    private void refreshSource() throws ElasticsearchGenerationException {
-        try {
-            mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
-        } catch (Exception e) {
-            throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
-        }
+    /**
+     * Recursively update sub field types.
+     */
+    public DocumentMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
+        Mapping updated = this.mapping.updateFieldType(fullNameToFieldType);
+        return new DocumentMapper(mapperService, updated);
     }
     public void close() {
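With every field final, DocumentMapper can no longer be "refreshed": derived state such as the serialized mapping source is computed exactly once, in the constructor, and merge/updateFieldType hand back whole new instances. The shape of that pattern, reduced to a hypothetical Config class:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical: derived state fixed at construction; any "change" builds a new object.
    final class Config {
        private final Map<String, String> entries;
        private final String source; // stands in for the serialized mapping source

        Config(Map<String, String> entries) {
            this.entries = Collections.unmodifiableMap(new HashMap<>(entries));
            this.source = this.entries.toString(); // computed once, never refreshed
        }

        String source() {
            return source;
        }

        Config withEntry(String key, String value) {
            Map<String, String> copy = new HashMap<>(entries);
            copy.put(key, value);
            return new Config(copy); // the old Config keeps its old source
        }
    }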


@@ -27,7 +27,6 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -46,7 +45,6 @@ import static org.elasticsearch.index.mapper.MapperBuilders.doc;
 public class DocumentMapperParser {
-    private final Settings indexSettings;
     final MapperService mapperService;
     final AnalysisService analysisService;
     private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class);
@@ -62,8 +60,7 @@ public class DocumentMapperParser {
     public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, AnalysisService analysisService,
                                 SimilarityService similarityService, MapperRegistry mapperRegistry) {
-        this.indexSettings = indexSettings.getSettings();
-        this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings);
+        this.parseFieldMatcher = new ParseFieldMatcher(indexSettings.getSettings());
         this.mapperService = mapperService;
         this.analysisService = analysisService;
         this.similarityService = similarityService;
@@ -110,7 +107,7 @@ public class DocumentMapperParser {
         Mapper.TypeParser.ParserContext parserContext = parserContext(type);
         // parse RootObjectMapper
-        DocumentMapper.Builder docBuilder = doc(indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService);
+        DocumentMapper.Builder docBuilder = doc((RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService);
         Iterator<Map.Entry<String, Object>> iterator = mapping.entrySet().iterator();
         // parse DocumentMapper
         while(iterator.hasNext()) {
@@ -137,7 +134,7 @@ public class DocumentMapperParser {
         checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");
-        return docBuilder.build(mapperService, this);
+        return docBuilder.build(mapperService);
     }
     public static void checkNoRemainingFields(String fieldName, Map<String, Object> fieldNodeMap, Version indexVersionCreated) {


@@ -26,10 +26,9 @@ import org.apache.lucene.util.CloseableThreadLocal;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
@@ -53,29 +52,21 @@ class DocumentParser implements Closeable {
     private CloseableThreadLocal<ParseContext.InternalParseContext> cache = new CloseableThreadLocal<ParseContext.InternalParseContext>() {
         @Override
         protected ParseContext.InternalParseContext initialValue() {
-            return new ParseContext.InternalParseContext(indexSettings, docMapperParser, docMapper, new ContentPath(0));
+            return new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0));
         }
     };
-    private final Settings indexSettings;
+    private final IndexSettings indexSettings;
     private final DocumentMapperParser docMapperParser;
     private final DocumentMapper docMapper;
-    private final ReleasableLock parseLock;
-    public DocumentParser(Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ReleasableLock parseLock) {
+    public DocumentParser(IndexSettings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper) {
         this.indexSettings = indexSettings;
         this.docMapperParser = docMapperParser;
         this.docMapper = docMapper;
-        this.parseLock = parseLock;
     }
     public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
-        try (ReleasableLock lock = parseLock.acquire()){
-            return innerParseDocument(source);
-        }
-    }
-    private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException {
         if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) {
             throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]");
         }
@@ -132,7 +123,7 @@ class DocumentParser implements Closeable {
         // try to parse the next token, this should be null if the object is ended properly
         // but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch)
-        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)
                 && source.parser() == null && parser != null) {
             // only check for end of tokens if we created the parser here
             token = parser.nextToken();
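The parse lock can go away because the mapper a DocumentParser reads is now immutable: a mapping update produces a new DocumentMapper (with its own parser) instead of mutating the one that concurrent indexing threads are using. The underlying idiom, sketched with a generic holder (illustrative, not the Elasticsearch types):

    import java.util.concurrent.atomic.AtomicReference;

    // Readers take a snapshot and never block; writers swap in a new immutable object.
    final class Holder<T> {
        private final AtomicReference<T> current;

        Holder(T initial) {
            this.current = new AtomicReference<>(initial);
        }

        T snapshot() {            // the parse path: lock-free read
            return current.get();
        }

        void publish(T updated) { // the mapping-update path: one atomic write
            current.set(updated);
        }
    }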


@@ -44,6 +44,7 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 import java.util.stream.StreamSupport;
 public abstract class FieldMapper extends Mapper implements Cloneable {
@@ -267,7 +268,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         }
     }
-    protected MappedFieldTypeReference fieldTypeRef;
+    protected MappedFieldType fieldType;
     protected final MappedFieldType defaultFieldType;
     protected MultiFields multiFields;
     protected CopyTo copyTo;
@@ -277,7 +278,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         super(simpleName);
         assert indexSettings != null;
         this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
-        this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // the reference ctor freezes the field type
+        fieldType.freeze();
+        this.fieldType = fieldType;
         defaultFieldType.freeze();
         this.defaultFieldType = defaultFieldType;
         this.multiFields = multiFields;
@@ -290,23 +292,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
     }
     public MappedFieldType fieldType() {
-        return fieldTypeRef.get();
-    }
-    /** Returns a reference to the MappedFieldType for this mapper. */
-    public MappedFieldTypeReference fieldTypeReference() {
-        return fieldTypeRef;
-    }
-    /**
-     * Updates the reference to this field's MappedFieldType.
-     * Implementations should assert equality of the underlying field type
-     */
-    public void setFieldTypeReference(MappedFieldTypeReference ref) {
-        if (ref.get().equals(fieldType()) == false) {
-            throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
-        }
-        this.fieldTypeRef = ref;
+        return fieldType;
     }
     /**
@@ -350,10 +336,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         return false;
     }
+    @Override
     public Iterator<Mapper> iterator() {
-        if (multiFields == null) {
-            return Collections.emptyIterator();
-        }
         return multiFields.iterator();
     }
@@ -389,12 +373,26 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         multiFields = multiFields.merge(fieldMergeWith.multiFields);
         // apply changeable values
-        MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
-        fieldType.freeze();
-        fieldTypeRef.set(fieldType);
+        this.fieldType = fieldMergeWith.fieldType;
         this.copyTo = fieldMergeWith.copyTo;
     }
+    @Override
+    public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
+        final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.names().fullName());
+        if (newFieldType == null) {
+            throw new IllegalStateException();
+        }
+        MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType);
+        if (fieldType == newFieldType && multiFields == updatedMultiFields) {
+            return this; // no change
+        }
+        FieldMapper updated = clone();
+        updated.fieldType = newFieldType;
+        updated.multiFields = updatedMultiFields;
+        return updated;
+    }
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(simpleName());
@@ -619,6 +617,27 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             return new MultiFields(mappers);
         }
+        public MultiFields updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
+            ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;
+            for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
+                FieldMapper updated = cursor.value.updateFieldType(fullNameToFieldType);
+                if (updated != cursor.value) {
+                    if (newMappersBuilder == null) {
+                        newMappersBuilder = ImmutableOpenMap.builder(mappers);
+                    }
+                    newMappersBuilder.put(updated.simpleName(), updated);
+                }
+            }
+            if (newMappersBuilder == null) {
+                return this;
+            }
+            ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
+            return new MultiFields(mappers);
+        }
         public Iterator<Mapper> iterator() {
             return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
         }


@@ -37,16 +37,16 @@ import java.util.Set;
 class FieldTypeLookup implements Iterable<MappedFieldType> {
     /** Full field name to field type */
-    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;
+    final CopyOnWriteHashMap<String, MappedFieldType> fullNameToFieldType;
     /** Full field name to types containing a mapping for this full name. */
-    private final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
+    final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
     /** Index field name to field type */
-    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;
+    final CopyOnWriteHashMap<String, MappedFieldType> indexNameToFieldType;
     /** Index field name to types containing a mapping for this index name. */
-    private final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
+    final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
     /** Create a new empty instance. */
     public FieldTypeLookup() {
@@ -57,9 +57,9 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     }
     private FieldTypeLookup(
-            CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName,
+            CopyOnWriteHashMap<String, MappedFieldType> fullName,
             CopyOnWriteHashMap<String, Set<String>> fullNameToTypes,
-            CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName,
+            CopyOnWriteHashMap<String, MappedFieldType> indexName,
             CopyOnWriteHashMap<String, Set<String>> indexNameToTypes) {
         this.fullNameToFieldType = fullName;
         this.fullNameToTypes = fullNameToTypes;
@@ -89,43 +89,35 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
      * from the provided fields. If a field already exists, the field type will be updated
      * to use the new mappers field type.
      */
-    public FieldTypeLookup copyAndAddAll(String type, Collection<FieldMapper> newFieldMappers) {
+    public FieldTypeLookup copyAndAddAll(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
         Objects.requireNonNull(type, "type must not be null");
         if (MapperService.DEFAULT_MAPPING.equals(type)) {
             throw new IllegalArgumentException("Default mappings should not be added to the lookup");
         }
-        CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
+        CopyOnWriteHashMap<String, MappedFieldType> fullName = this.fullNameToFieldType;
         CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
-        CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;
+        CopyOnWriteHashMap<String, MappedFieldType> indexName = this.indexNameToFieldType;
        CopyOnWriteHashMap<String, Set<String>> indexNameToTypes = this.indexNameToTypes;
-        for (FieldMapper fieldMapper : newFieldMappers) {
+        for (FieldMapper fieldMapper : fieldMappers) {
             MappedFieldType fieldType = fieldMapper.fieldType();
-            MappedFieldTypeReference fullNameRef = fullName.get(fieldType.names().fullName());
-            MappedFieldTypeReference indexNameRef = indexName.get(fieldType.names().indexName());
-            if (fullNameRef == null && indexNameRef == null) {
-                // new field, just use the ref from this field mapper
-                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldTypeReference());
-                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldTypeReference());
-            } else if (fullNameRef == null) {
-                // this index name already exists, so copy over the reference
-                fullName = fullName.copyAndPut(fieldType.names().fullName(), indexNameRef);
-                indexNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
-                fieldMapper.setFieldTypeReference(indexNameRef);
-            } else if (indexNameRef == null) {
-                // this full name already exists, so copy over the reference
-                indexName = indexName.copyAndPut(fieldType.names().indexName(), fullNameRef);
-                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
-                fieldMapper.setFieldTypeReference(fullNameRef);
-            } else if (fullNameRef == indexNameRef) {
-                // the field already exists, so replace the reference in this mapper with the pre-existing one
-                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
-                fieldMapper.setFieldTypeReference(fullNameRef);
-            } else {
+            MappedFieldType fullNameFieldType = fullName.get(fieldType.names().fullName());
+            MappedFieldType indexNameFieldType = indexName.get(fieldType.names().indexName());
+            if (fullNameFieldType != null && indexNameFieldType != null && fullNameFieldType != indexNameFieldType) {
                 // this new field bridges between two existing field names (a full and index name), which we cannot support
                 throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
             }
+            // is the update even legal?
+            checkCompatibility(type, fieldMapper, updateAllTypes);
+            if (fieldType != fullNameFieldType || fieldType != indexNameFieldType) {
+                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldType());
+                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldType());
+            }
             fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type);
             indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type);
         }
@@ -145,42 +137,38 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     }
     /**
-     * Checks if the given mappers' field types are compatible with existing field types.
-     * If any are not compatible, an IllegalArgumentException is thrown.
+     * Checks if the given field type is compatible with an existing field type.
+     * An IllegalArgumentException is thrown in case of incompatibility.
      * If updateAllTypes is true, only basic compatibility is checked.
      */
-    public void checkCompatibility(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
-        for (FieldMapper fieldMapper : fieldMappers) {
-            MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
-            if (ref != null) {
-                List<String> conflicts = new ArrayList<>();
-                final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
-                boolean strict = beStrict(type, types, updateAllTypes);
-                ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-                if (conflicts.isEmpty() == false) {
-                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
-                }
+    private void checkCompatibility(String type, FieldMapper fieldMapper, boolean updateAllTypes) {
+        MappedFieldType fieldType = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
+        if (fieldType != null) {
+            List<String> conflicts = new ArrayList<>();
+            final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
+            boolean strict = beStrict(type, types, updateAllTypes);
+            fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
+            if (conflicts.isEmpty() == false) {
+                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
             }
-            // field type for the index name must be compatible too
-            MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
-            if (indexNameRef != null) {
-                List<String> conflicts = new ArrayList<>();
-                final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
-                boolean strict = beStrict(type, types, updateAllTypes);
-                indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-                if (conflicts.isEmpty() == false) {
-                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
-                }
+        }
+        // field type for the index name must be compatible too
+        fieldType = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
+        if (fieldType != null) {
+            List<String> conflicts = new ArrayList<>();
+            final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
+            boolean strict = beStrict(type, types, updateAllTypes);
+            fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
+            if (conflicts.isEmpty() == false) {
+                throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
             }
         }
     }
     /** Returns the field for the given field */
     public MappedFieldType get(String field) {
-        MappedFieldTypeReference ref = fullNameToFieldType.get(field);
-        if (ref == null) return null;
-        return ref.get();
+        return fullNameToFieldType.get(field);
     }
     /** Get the set of types that have a mapping for the given field. */
@@ -194,9 +182,7 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
     /** Returns the field type for the given index name */
     public MappedFieldType getByIndexName(String field) {
-        MappedFieldTypeReference ref = indexNameToFieldType.get(field);
-        if (ref == null) return null;
-        return ref.get();
+        return indexNameToFieldType.get(field);
     }
     /** Get the set of types that have a mapping for the given field. */
@@ -238,7 +224,8 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         return fields;
     }
+    @Override
     public Iterator<MappedFieldType> iterator() {
-        return fullNameToFieldType.values().stream().map((p) -> p.get()).iterator();
+        return fullNameToFieldType.values().iterator();
     }
 }
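copyAndAddAll leans on the copy-on-write map semantics: every "mutation" yields a new map and leaves the original alone, so if checkCompatibility throws halfway through, the candidate state is simply discarded and the published lookup stays untouched. The semantics in miniature, sketched with plain JDK maps (the real CopyOnWriteHashMap shares structure instead of copying wholesale):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Copy-and-put: the input map is never modified.
    static <K, V> Map<K, V> copyAndPut(Map<K, V> map, K key, V value) {
        Map<K, V> copy = new HashMap<>(map);
        copy.put(key, value);
        return Collections.unmodifiableMap(copy);
    }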


@@ -1,41 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.mapper;
-/**
- * A container for a {@link MappedFieldType} which can be updated and is reference counted.
- */
-public class MappedFieldTypeReference {
-    private MappedFieldType fieldType; // the current field type this reference points to
-    public MappedFieldTypeReference(MappedFieldType fieldType) {
-        fieldType.freeze(); // ensure frozen
-        this.fieldType = fieldType;
-    }
-    public MappedFieldType get() {
-        return fieldType;
-    }
-    public void set(MappedFieldType fieldType) {
-        fieldType.freeze(); // ensure frozen
-        this.fieldType = fieldType;
-    }
-}
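Deleting this class removes the shared mutable cell from the mapping machinery: several mappers could alias one reference, so a set() through any path was instantly visible to all of them, including from a merge that later failed. A small self-contained illustration of the aliasing behavior that goes away (AtomicReference used as a stand-in for the deleted class):

    import java.util.concurrent.atomic.AtomicReference;

    public class SharedCellDemo {
        public static void main(String[] args) {
            AtomicReference<String> shared = new AtomicReference<>("keyword");
            AtomicReference<String> alias = shared; // a second holder of the same cell
            shared.set("text");                     // every alias observes the write at once
            System.out.println(alias.get());        // prints "text"
        }
    }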


@@ -177,4 +177,11 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
     /** Return the merge of {@code mergeWith} into this.
      *  Both {@code this} and {@code mergeWith} will be left unmodified. */
     public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes);
+    /**
+     * Update the field type of this mapper. This is necessary because some mapping updates
+     * can modify mappings across several types. This method must return a copy of the mapper
+     * so that the current mapper is not modified.
+     */
+    public abstract Mapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType);
 }
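The contract described here is classic copy-on-update over a tree: each mapper returns itself when neither it nor any descendant changed, and a shallow copy otherwise, so untouched subtrees are shared between the old and the new mapping. A self-contained sketch with a hypothetical Node type:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    // Hypothetical copy-on-update tree node: update() returns `this` when
    // nothing changed, so unchanged subtrees are shared across versions.
    final class Node {
        final String name;
        final String type;
        final List<Node> children;

        Node(String name, String type, List<Node> children) {
            this.name = name;
            this.type = type;
            this.children = children;
        }

        Node update(Map<String, String> nameToType) {
            String newType = nameToType.getOrDefault(name, type);
            boolean changed = newType.equals(type) == false;
            List<Node> newChildren = new ArrayList<>(children.size());
            for (Node child : children) {
                Node updated = child.update(nameToType);
                changed |= updated != child;
                newChildren.add(updated);
            }
            return changed ? new Node(name, newType, newChildren) : this;
        }
    }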


@@ -19,7 +19,6 @@
 package org.elasticsearch.index.mapper;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
 import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.core.ByteFieldMapper;
@@ -41,8 +40,8 @@ public final class MapperBuilders {
     private MapperBuilders() {}
-    public static DocumentMapper.Builder doc(Settings settings, RootObjectMapper.Builder objectBuilder, MapperService mapperService) {
-        return new DocumentMapper.Builder(settings, objectBuilder, mapperService);
+    public static DocumentMapper.Builder doc(RootObjectMapper.Builder objectBuilder, MapperService mapperService) {
+        return new DocumentMapper.Builder(objectBuilder, mapperService);
     }
     public static RootObjectMapper.Builder rootObject(String name) {


@@ -35,11 +35,9 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.regex.Regex;
-import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalysisService;
@@ -65,7 +63,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -98,12 +95,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     private volatile Map<String, DocumentMapper> mappers = emptyMap();
-    // A lock for mappings: modifications (put mapping) need to be performed
-    // under the write lock and read operations (document parsing) need to be
-    // performed under the read lock
-    final ReentrantReadWriteLock mappingLock = new ReentrantReadWriteLock();
-    private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
     private volatile FieldTypeLookup fieldTypes;
     private volatile Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
     private boolean hasNested = false; // updated dynamically to true when a nested object is added
@@ -216,7 +207,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             DocumentMapper mapper = documentParser.parse(type, mappingSource);
             // still add it as a document mapper so we have it registered and, for example, persisted back into
             // the cluster meta data if needed, or checked for existence
-            try (ReleasableLock lock = mappingWriteLock.acquire()) {
+            synchronized (this) {
                 mappers = newMapBuilder(mappers).put(type, mapper).map();
             }
             try {
@@ -226,7 +217,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             }
             return mapper;
         } else {
-            try (ReleasableLock lock = mappingWriteLock.acquire()) {
+            synchronized (this) {
                 // only apply the default mapping if we don't have the type yet
                 applyDefault &= mappers.containsKey(type) == false;
                 return merge(parse(type, mappingSource, applyDefault), updateAllTypes);
@@ -234,9 +225,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         }
     }
-    // never expose this to the outside world, we need to reparse the doc mapper so we get fresh
-    // instances of field mappers to properly remove existing doc mapper
-    private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
+    private synchronized DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
         if (mapper.type().length() == 0) {
             throw new InvalidTypeNameException("mapping type name is empty");
         }
@@ -262,34 +251,89 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                 logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type());
             }
         }
-        // we can add new field/object mappers while the old ones are there
-        // since we get new instances of those, and when we remove, we remove
-        // by instance equality
+        // 1. compute the merged DocumentMapper
         DocumentMapper oldMapper = mappers.get(mapper.type());
+        DocumentMapper newMapper;
         if (oldMapper != null) {
-            oldMapper.merge(mapper.mapping(), false, updateAllTypes);
-            return oldMapper;
+            newMapper = oldMapper.merge(mapper.mapping(), updateAllTypes);
         } else {
-            Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> newMappers = checkMappersCompatibility(
-                    mapper.type(), mapper.mapping(), updateAllTypes);
-            Collection<ObjectMapper> newObjectMappers = newMappers.v1();
-            Collection<FieldMapper> newFieldMappers = newMappers.v2();
-            addMappers(mapper.type(), newObjectMappers, newFieldMappers);
+            newMapper = mapper;
+        }
+        // 2. check basic sanity of the new mapping
+        List<ObjectMapper> objectMappers = new ArrayList<>();
+        List<FieldMapper> fieldMappers = new ArrayList<>();
+        Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers);
+        MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers);
+        checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers);
+        checkObjectsCompatibility(newMapper.type(), objectMappers, fieldMappers, updateAllTypes);
+        // 3. update lookup data-structures
+        // this will in particular make sure that the merged fields are compatible with other types
+        FieldTypeLookup fieldTypes = this.fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, updateAllTypes);
+        boolean hasNested = this.hasNested;
+        Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers);
+        for (ObjectMapper objectMapper : objectMappers) {
+            fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
+            if (objectMapper.nested().isNested()) {
+                hasNested = true;
+            }
+        }
+        fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers);
+        Set<String> parentTypes = this.parentTypes;
+        if (oldMapper == null && newMapper.parentFieldMapper().active()) {
+            parentTypes = new HashSet<>(parentTypes.size() + 1);
+            parentTypes.addAll(this.parentTypes);
+            parentTypes.add(mapper.parentFieldMapper().type());
+            parentTypes = Collections.unmodifiableSet(parentTypes);
+        }
+        Map<String, DocumentMapper> mappers = new HashMap<>(this.mappers);
+        mappers.put(newMapper.type(), newMapper);
+        for (Map.Entry<String, DocumentMapper> entry : mappers.entrySet()) {
+            if (entry.getKey().equals(DEFAULT_MAPPING)) {
+                continue;
+            }
+            DocumentMapper m = entry.getValue();
+            // apply changes to the field types back
+            m = m.updateFieldType(fieldTypes.fullNameToFieldType);
+            entry.setValue(m);
+        }
+        mappers = Collections.unmodifiableMap(mappers);
+        // 4. commit the change
+        this.mappers = mappers;
+        this.fieldTypes = fieldTypes;
+        this.hasNested = hasNested;
+        this.fullPathObjectMappers = fullPathObjectMappers;
+        this.parentTypes = parentTypes;
+        // 5. send notifications about the change
+        if (oldMapper == null) {
+            // means the mapping was created
             for (DocumentTypeListener typeListener : typeListeners) {
                 typeListener.beforeCreate(mapper);
             }
-            mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map();
-            if (mapper.parentFieldMapper().active()) {
-                Set<String> newParentTypes = new HashSet<>(parentTypes.size() + 1);
-                newParentTypes.addAll(parentTypes);
-                newParentTypes.add(mapper.parentFieldMapper().type());
-                parentTypes = unmodifiableSet(newParentTypes);
-            }
-            assert assertSerialization(mapper);
-            return mapper;
         }
+        assert assertSerialization(newMapper);
+        assert assertMappersShareSameFieldType();
+        return newMapper;
+    }
+    private boolean assertMappersShareSameFieldType() {
+        for (DocumentMapper mapper : docMappers(false)) {
+            List<FieldMapper> fieldMappers = new ArrayList<>();
+            Collections.addAll(fieldMappers, mapper.mapping().metadataMappers);
+            MapperUtils.collect(mapper.root(), new ArrayList<ObjectMapper>(), fieldMappers);
+            for (FieldMapper fieldMapper : fieldMappers) {
+                assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name();
+            }
+        }
+        return true;
     }
     private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) {
@ -339,8 +383,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
} }
} }
protected void checkMappersCompatibility(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) { private void checkObjectsCompatibility(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
assert mappingLock.isWriteLockedByCurrentThread(); assert Thread.holdsLock(this);
checkFieldUniqueness(type, objectMappers, fieldMappers); checkFieldUniqueness(type, objectMappers, fieldMappers);
@ -358,31 +402,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types"); throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types");
} }
} }
fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes);
}
protected Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> checkMappersCompatibility(
String type, Mapping mapping, boolean updateAllTypes) {
List<ObjectMapper> objectMappers = new ArrayList<>();
List<FieldMapper> fieldMappers = new ArrayList<>();
Collections.addAll(fieldMappers, mapping.metadataMappers);
MapperUtils.collect(mapping.root, objectMappers, fieldMappers);
checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes);
return new Tuple<>(objectMappers, fieldMappers);
}
protected void addMappers(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
assert mappingLock.isWriteLockedByCurrentThread();
Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers);
for (ObjectMapper objectMapper : objectMappers) {
fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNested = true;
}
}
this.fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers);
this.fieldTypes = this.fieldTypes.copyAndAddAll(type, fieldMappers);
}
public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
@@ -93,7 +93,7 @@ public final class Mapping implements ToXContent {
return (T) metadataMappersMap.get(clazz);
}
- /** @see DocumentMapper#merge(Mapping, boolean, boolean) */
+ /** @see DocumentMapper#merge(Mapping, boolean) */
public Mapping merge(Mapping mergeWith, boolean updateAllTypes) {
RootObjectMapper mergedRoot = root.merge(mergeWith.root, updateAllTypes);
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> mergedMetaDataMappers = new HashMap<>(metadataMappersMap);
@@ -110,6 +110,18 @@ public final class Mapping implements ToXContent {
return new Mapping(indexCreated, mergedRoot, mergedMetaDataMappers.values().toArray(new MetadataFieldMapper[0]), mergeWith.meta);
}
/**
* Recursively update sub field types.
*/
public Mapping updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
final MetadataFieldMapper[] updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
for (int i = 0; i < updatedMeta.length; ++i) {
updatedMeta[i] = (MetadataFieldMapper) updatedMeta[i].updateFieldType(fullNameToFieldType);
}
RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType);
return new Mapping(indexCreated, updatedRoot, updatedMeta, meta);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
root.toXContent(builder, params, new ToXContent() {
@@ -346,11 +346,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
}
}
- protected final DoubleFieldMapper latMapper;
- protected final DoubleFieldMapper lonMapper;
- protected final StringFieldMapper geoHashMapper;
+ protected DoubleFieldMapper latMapper;
+ protected DoubleFieldMapper lonMapper;
+ protected StringFieldMapper geoHashMapper;
protected Explicit<Boolean> ignoreMalformed;
@@ -504,4 +504,25 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
}
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType);
StringFieldMapper geoUpdated = geoHashMapper == null ? null : (StringFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType);
DoubleFieldMapper latUpdated = latMapper == null ? null : (DoubleFieldMapper) latMapper.updateFieldType(fullNameToFieldType);
DoubleFieldMapper lonUpdated = lonMapper == null ? null : (DoubleFieldMapper) lonMapper.updateFieldType(fullNameToFieldType);
if (updated == this
&& geoUpdated == geoHashMapper
&& latUpdated == latMapper
&& lonUpdated == lonMapper) {
return this;
}
if (updated == this) {
updated = (BaseGeoPointFieldMapper) updated.clone();
}
updated.geoHashMapper = geoUpdated;
updated.latMapper = latUpdated;
updated.lonMapper = lonUpdated;
return updated;
}
}
@@ -216,7 +216,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
FieldNamesFieldType newFieldType = fieldType().clone();
newFieldType.setEnabled(false);
newFieldType.freeze();
- fieldTypeRef.set(newFieldType);
+ this.fieldType = newFieldType;
}
}
@@ -95,8 +95,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
private boolean explicitStore = false;
private Boolean ignoreMissing = null;
- public Builder(MappedFieldType existing) {
- super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
+ public Builder(MappedFieldType existing, Settings settings) {
+ super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, chooseFieldType(settings, null));
if (existing != null) {
// if there is an existing type, always use that store value (only matters for < 2.0)
explicitStore = true;
@@ -167,7 +167,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
- Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
+ Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings());
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
parseField(builder, builder.name, node, parserContext);
}
@@ -260,7 +260,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
private final Boolean ignoreMissing;
private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) {
- this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
+ this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null).clone(), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
}
private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path,
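The extra .clone() on the default field type is easy to miss but is the substance of this hunk: handing the same mutable field-type instance to several mappers lets a later setter or freeze() leak between them. A sketch of the failure mode being avoided (SomeFieldType is an illustrative stand-in, not an Elasticsearch type):

// Illustrative: why shared mutable defaults need a defensive copy.
SomeFieldType shared = DEFAULTS;   // one instance handed to two owners
SomeFieldType a = shared;          // without clone(): a and b alias each other
SomeFieldType b = shared;
a.setStored(true);                 // silently changes b as well
// with clone(): SomeFieldType a = shared.clone(); each owner mutates its own copy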
@@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
@@ -493,6 +494,28 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}
}
@Override
public ObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
List<Mapper> updatedMappers = null;
for (Mapper mapper : this) {
Mapper updated = mapper.updateFieldType(fullNameToFieldType);
if (mapper != updated) {
if (updatedMappers == null) {
updatedMappers = new ArrayList<>();
}
updatedMappers.add(updated);
}
}
if (updatedMappers == null) {
return this;
}
ObjectMapper updated = clone();
for (Mapper updatedMapper : updatedMappers) {
updated.putMapper(updatedMapper);
}
return updated;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
toXContent(builder, params, null);
@@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
@@ -295,6 +296,11 @@ public class RootObjectMapper extends ObjectMapper {
this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]);
}
@Override
public RootObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
return (RootObjectMapper) super.updateFieldType(fullNameToFieldType);
}
@Override
protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
if (dynamicDateTimeFormatters != Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
@@ -1931,9 +1931,8 @@ public class InternalEngineTests extends ESTestCase {
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry();
MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry);
- DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService);
- DocumentMapperParser parser = mapperService.documentMapperParser();
- this.docMapper = b.build(mapperService, parser);
+ DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService);
+ this.docMapper = b.build(mapperService);
}
@Override
@@ -0,0 +1,144 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.List;
public class DocumentFieldMapperTests extends LuceneTestCase {
private static class FakeAnalyzer extends Analyzer {
private final String output;
public FakeAnalyzer(String output) {
this.output = output;
}
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new Tokenizer() {
boolean incremented = false;
CharTermAttribute term = addAttribute(CharTermAttribute.class);
@Override
public boolean incrementToken() throws IOException {
if (incremented) {
return false;
}
term.setLength(0).append(output);
incremented = true;
return true;
}
};
return new TokenStreamComponents(tokenizer);
}
}
static class FakeFieldType extends MappedFieldType {
public FakeFieldType() {
super();
}
FakeFieldType(FakeFieldType other) {
super(other);
}
@Override
public MappedFieldType clone() {
return new FakeFieldType(this);
}
@Override
public String typeName() {
return "fake";
}
}
static class FakeFieldMapper extends FieldMapper {
private static final Settings SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
public FakeFieldMapper(String simpleName, MappedFieldType fieldType) {
super(simpleName, fieldType.clone(), fieldType.clone(), SETTINGS, null, null);
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
}
@Override
protected String contentType() {
return null;
}
}
public void testAnalyzers() throws IOException {
FakeFieldType fieldType1 = new FakeFieldType();
fieldType1.setNames(new MappedFieldType.Names("field1"));
fieldType1.setIndexAnalyzer(new NamedAnalyzer("foo", new FakeAnalyzer("index")));
fieldType1.setSearchAnalyzer(new NamedAnalyzer("bar", new FakeAnalyzer("search")));
fieldType1.setSearchQuoteAnalyzer(new NamedAnalyzer("baz", new FakeAnalyzer("search_quote")));
FieldMapper fieldMapper1 = new FakeFieldMapper("field1", fieldType1);
FakeFieldType fieldType2 = new FakeFieldType();
fieldType2.setNames(new MappedFieldType.Names("field2"));
FieldMapper fieldMapper2 = new FakeFieldMapper("field2", fieldType2);
Analyzer defaultIndex = new FakeAnalyzer("default_index");
Analyzer defaultSearch = new FakeAnalyzer("default_search");
Analyzer defaultSearchQuote = new FakeAnalyzer("default_search_quote");
DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(Arrays.asList(fieldMapper1, fieldMapper2), defaultIndex, defaultSearch, defaultSearchQuote);
assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field1", "index");
assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field1", "search");
assertAnalyzes(documentFieldMappers.searchQuoteAnalyzer(), "field1", "search_quote");
assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field2", "default_index");
assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field2", "default_search");
assertAnalyzes(documentFieldMappers.searchQuoteAnalyzer(), "field2", "default_search_quote");
}
private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {
try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) {
CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
assertTrue(tok.incrementToken());
assertEquals(output, term.toString());
}
}
}
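The assertAnalyzes helper above gets away without reset()/end() only because the fake tokenizer never reads its input. For reference, the full TokenStream consumption contract against a real analyzer looks like this (hedged sketch using the standard Lucene API already imported by the test):

try (TokenStream ts = analyzer.tokenStream("field1", new StringReader("some text"))) {
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    ts.reset();                      // required before the first incrementToken()
    while (ts.incrementToken()) {
        System.out.println(term.toString());
    }
    ts.end();                        // consume end-of-stream attributes
}                                    // try-with-resources closes the stream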
@@ -31,6 +31,8 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
public class FieldTypeLookupTests extends ESTestCase {
public void testEmpty() {
@@ -53,7 +55,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testDefaultMapping() {
FieldTypeLookup lookup = new FieldTypeLookup();
try {
- lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.emptyList());
+ lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.emptyList(), randomBoolean());
fail();
} catch (IllegalArgumentException expected) {
assertEquals("Default mappings should not be added to the lookup", expected.getMessage());
@@ -63,7 +65,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testAddNewField() {
FieldTypeLookup lookup = new FieldTypeLookup();
FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
- FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f));
+ FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean());
assertNull(lookup.get("foo"));
assertNull(lookup.get("bar"));
assertNull(lookup.getByIndexName("foo"));
@@ -85,94 +87,77 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testAddExistingField() {
FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
- MappedFieldType originalFieldType = f.fieldType();
FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type1", newList(f));
- FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
- assertNotSame(originalFieldType, f.fieldType());
- assertSame(f.fieldType(), f2.fieldType());
- assertSame(f.fieldType(), lookup2.get("foo"));
- assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+ lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
+ FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
+ assertSame(f2.fieldType(), lookup2.get("foo"));
+ assertSame(f2.fieldType(), lookup2.getByIndexName("foo"));
assertEquals(1, size(lookup2.iterator()));
}
public void testAddExistingIndexName() {
FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo");
- MappedFieldType originalFieldType = f.fieldType();
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type1", newList(f));
- FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
- assertNotSame(originalFieldType, f.fieldType());
- assertSame(f.fieldType(), f2.fieldType());
+ lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
+ FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
assertSame(f.fieldType(), lookup2.get("foo"));
- assertSame(f.fieldType(), lookup2.get("bar"));
- assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+ assertSame(f2.fieldType(), lookup2.get("bar"));
+ assertSame(f2.fieldType(), lookup2.getByIndexName("foo"));
assertEquals(2, size(lookup2.iterator()));
}
public void testAddExistingFullName() {
FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
- MappedFieldType originalFieldType = f.fieldType();
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type1", newList(f));
- FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
- assertNotSame(originalFieldType, f.fieldType());
- assertSame(f.fieldType(), f2.fieldType());
- assertSame(f.fieldType(), lookup2.get("foo"));
- assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
- assertSame(f.fieldType(), lookup2.getByIndexName("bar"));
- assertEquals(1, size(lookup2.iterator()));
+ try {
+ lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("mapper [foo] has different [index_name]"));
+ }
}
public void testAddExistingBridgeName() {
FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type1", newList(f, f2));
+ lookup = lookup.copyAndAddAll("type1", newList(f, f2), randomBoolean());
try {
FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar");
- lookup.copyAndAddAll("type2", newList(f3));
+ lookup.copyAndAddAll("type2", newList(f3), randomBoolean());
} catch (IllegalStateException e) {
assertTrue(e.getMessage().contains("insane mappings"));
}
try {
FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo");
- lookup.copyAndAddAll("type2", newList(f3));
+ lookup.copyAndAddAll("type2", newList(f3), randomBoolean());
} catch (IllegalStateException e) {
assertTrue(e.getMessage().contains("insane mappings"));
}
}
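With the old checkCompatibility step folded into copyAndAddAll, these tests now exercise a single entry point: the call either returns a new lookup or throws, and the original lookup is never mutated, which is why it can be reused after a failed call. The resulting calling pattern, as a hedged sketch (fieldMappers is an assumed list of FieldMapper instances for the type being updated):

// Hedged sketch: copy-on-write update of the lookup.
FieldTypeLookup current = new FieldTypeLookup();
try {
    current = current.copyAndAddAll("my_type", fieldMappers, false);
} catch (IllegalArgumentException e) {
    // incompatible field type change; `current` is untouched and still usable
}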
public void testCheckCompatibilityNewField() {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup.checkCompatibility("type", newList(f1), false);
}
public void testCheckCompatibilityMismatchedTypes() {
FieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type", newList(f1));
+ lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo");
FieldMapper f2 = new FakeFieldMapper("foo", ft2);
try {
- lookup.checkCompatibility("type2", newList(f2), false);
+ lookup.copyAndAddAll("type2", newList(f2), false);
fail("expected type mismatch");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
}
// fails even if updateAllTypes == true
try {
- lookup.checkCompatibility("type2", newList(f2), true);
+ lookup.copyAndAddAll("type2", newList(f2), true);
fail("expected type mismatch");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
@@ -182,33 +167,33 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testCheckCompatibilityConflict() {
FieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type", newList(f1));
+ lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo", "bar");
ft2.setBoost(2.0f);
FieldMapper f2 = new FakeFieldMapper("foo", ft2);
try {
// different type
- lookup.checkCompatibility("type2", newList(f2), false);
+ lookup.copyAndAddAll("type2", newList(f2), false);
fail("expected conflict");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("to update [boost] across all types"));
}
- lookup.checkCompatibility("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
- lookup.checkCompatibility("type2", newList(f2), true); // boost is updateable, so ok if forcing
+ lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
+ lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing
// now with a non changeable setting
MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar");
ft3.setStored(true);
FieldMapper f3 = new FakeFieldMapper("foo", ft3);
try {
- lookup.checkCompatibility("type2", newList(f3), false);
+ lookup.copyAndAddAll("type2", newList(f3), false);
fail("expected conflict");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("has different [store] values"));
}
// even with updateAllTypes == true, incompatible
try {
- lookup.checkCompatibility("type2", newList(f3), true);
+ lookup.copyAndAddAll("type2", newList(f3), true);
fail("expected conflict");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("has different [store] values"));
@@ -219,7 +204,7 @@ public class FieldTypeLookupTests extends ESTestCase {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type", newList(f1, f2));
+ lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
Collection<String> names = lookup.simpleMatchToIndexNames("b*");
assertTrue(names.contains("baz"));
assertTrue(names.contains("boo"));
@@ -229,7 +214,7 @@ public class FieldTypeLookupTests extends ESTestCase {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type", newList(f1, f2));
+ lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
Collection<String> names = lookup.simpleMatchToFullName("b*");
assertTrue(names.contains("foo"));
assertTrue(names.contains("bar"));
@@ -238,7 +223,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testIteratorImmutable() {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldTypeLookup lookup = new FieldTypeLookup();
- lookup = lookup.copyAndAddAll("type", newList(f1));
+ lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
try {
Iterator<MappedFieldType> itr = lookup.iterator();
@@ -44,6 +44,7 @@ public class CamelCaseFieldNameTests extends ESSingleNodeTestCase {
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
documentMapper = index.mapperService().documentMapper("type");
assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));
@@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
@@ -39,6 +40,7 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -127,6 +129,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertNotNull(parsedDoc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type1").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
docMapper = index.mapperService().documentMapper("type1");
fieldMapper = docMapper.mappers().getMapper("new_field");
assertThat(fieldMapper, instanceOf(LongFieldMapper.class));
}
@@ -308,27 +311,15 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
- DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
- DocumentMapper docMapperBefore = parser.parse("type1", new CompressedXContent(mappingBefore));
- List<String> fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
- assertThat(fields.size(), equalTo(2));
- assertThat(fields.get(0), equalTo("foo"));
- assertThat(fields.get(1), equalTo("bar"));
- DocumentMapper docMapperAfter = parser.parse("type1", new CompressedXContent(mappingAfter));
- docMapperBefore.merge(docMapperAfter.mapping(), true, false);
- docMapperBefore.merge(docMapperAfter.mapping(), false, false);
- fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
- assertThat(fields.size(), equalTo(2));
- assertThat(fields.get(0), equalTo("baz"));
- assertThat(fields.get(1), equalTo("bar"));
+ MapperService mapperService = createIndex("test").mapperService();
+ DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), true, false);
+ assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields());
+ DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), false, false);
+ assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields());
+ assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields());
}
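The final assertion is the point of the rewrite: MapperService.merge now returns a fresh, immutable DocumentMapper, so a mapper obtained before an update keeps reflecting the mapping it was built from. That contract as a hedged sketch (mappingV1 and mappingV2 are assumed CompressedXContent sources):

// Illustrative: each merge returns a new immutable DocumentMapper.
DocumentMapper v1 = mapperService.merge("type1", mappingV1, true, false);
DocumentMapper v2 = mapperService.merge("type1", mappingV2, false, false);
// v1 still reflects mappingV1; v2 reflects the merged result of both.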
public void testCopyToNestedField() throws Exception {
@@ -28,6 +28,7 @@ import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@@ -50,8 +51,8 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject().string();
- DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
- DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
+ MapperService mapperService = createIndex("test").mapperService();
+ DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), true, false);
String stage2Mapping = XContentFactory.jsonBuilder().startObject()
.startObject("person")
@@ -62,15 +63,12 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject().endObject().string();
- DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
- stage1.merge(stage2.mapping(), true, false);
- // Just simulated so merge hasn't happened yet
+ DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), false, false);
+ // previous mapper has not been modified
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
- stage1.merge(stage2.mapping(), false, false);
- // Just simulated so merge hasn't happened yet
- assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
+ // but the new one has the change
+ assertThat(((TokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
}
public void testCountPositions() throws IOException {
@@ -80,7 +80,9 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
.startObject("properties").endObject()
.endObject().endObject().string();
- DocumentMapper defaultMapper = mapper("test", "type", mapping);
+ IndexService index = createIndex("test");
+ client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
+ DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@@ -94,6 +96,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
defaultMapper = index.mapperService().documentMapper("type");
FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1");
assertThat(fieldMapper, instanceOf(DateFieldMapper.class));
DateFieldMapper dateFieldMapper = (DateFieldMapper)fieldMapper;
@@ -384,7 +387,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
Map<String, String> config = getConfigurationViaXContent(initialDateFieldMapper);
assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy"));
- defaultMapper.merge(mergeMapper.mapping(), false, false);
+ defaultMapper = defaultMapper.merge(mergeMapper.mapping(), false);
assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class)));
@@ -44,6 +44,7 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase {
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json");
ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
docMapper = index.mapperService().documentMapper("person");
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
@@ -44,6 +44,7 @@ public class PathMatchDynamicTemplateTests extends ESSingleNodeTestCase {
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json");
ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
docMapper = index.mapperService().documentMapper("person");
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
@@ -55,6 +55,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase {
ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", builder.bytes());
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
docMapper = index.mapperService().documentMapper("person");
DocumentFieldMappers mappers = docMapper.mappers();
assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue());
@@ -74,6 +75,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase {
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
docMapper = index.mapperService().documentMapper("person");
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
@@ -130,6 +132,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase {
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
ParsedDocument parsedDoc = docMapper.parse("test", "person", "1", new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
docMapper = index.mapperService().documentMapper("person");
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
@@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
@@ -160,11 +161,11 @@ public class ExternalMapper extends FieldMapper {
private final String generatedValue;
private final String mapperName;
- private final BinaryFieldMapper binMapper;
- private final BooleanFieldMapper boolMapper;
- private final BaseGeoPointFieldMapper pointMapper;
- private final GeoShapeFieldMapper shapeMapper;
- private final FieldMapper stringMapper;
+ private BinaryFieldMapper binMapper;
+ private BooleanFieldMapper boolMapper;
+ private BaseGeoPointFieldMapper pointMapper;
+ private GeoShapeFieldMapper shapeMapper;
+ private FieldMapper stringMapper;
public ExternalMapper(String simpleName, MappedFieldType fieldType,
String generatedValue, String mapperName,
@@ -216,6 +217,36 @@ public class ExternalMapper extends FieldMapper {
// ignore this for now
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
ExternalMapper update = (ExternalMapper) super.updateFieldType(fullNameToFieldType);
MultiFields multiFieldsUpdate = multiFields.updateFieldType(fullNameToFieldType);
BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType);
BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType);
GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType);
GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType);
StringFieldMapper stringMapperUpdate = (StringFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
if (update == this
&& multiFieldsUpdate == multiFields
&& binMapperUpdate == binMapper
&& boolMapperUpdate == boolMapper
&& pointMapperUpdate == pointMapper
&& shapeMapperUpdate == shapeMapper
&& stringMapperUpdate == stringMapper) {
return this;
}
if (update == this) {
update = (ExternalMapper) clone();
}
update.multiFields = multiFieldsUpdate;
update.binMapper = binMapperUpdate;
update.boolMapper = boolMapperUpdate;
update.pointMapper = pointMapperUpdate;
update.shapeMapper = shapeMapperUpdate;
update.stringMapper = stringMapperUpdate;
return update;
}
@Override
public Iterator<Mapper> iterator() {
return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator());
@@ -53,23 +53,11 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
super(FIELD_NAME, FIELD_TYPE, FIELD_TYPE, indexSettings);
}
@Override
public String name() {
return CONTENT_TYPE;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
// handled in post parse
}
@Override
public void doMerge(Mapper mergeWith, boolean updateAllTypes) {
if (!(mergeWith instanceof ExternalMetadataMapper)) {
throw new IllegalArgumentException("Trying to merge " + mergeWith + " with " + this);
}
}
@Override
public Iterator<Mapper> iterator() {
return Collections.emptyIterator();
@@ -97,7 +85,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
public static class Builder extends MetadataFieldMapper.Builder<Builder, ExternalMetadataMapper> {
protected Builder() {
- super(CONTENT_TYPE, FIELD_TYPE, FIELD_TYPE);
+ super(FIELD_NAME, FIELD_TYPE, FIELD_TYPE);
}
@Override
@@ -376,7 +376,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw")
.endObject().endObject().endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
- DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false);
+ DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false);
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree")
.field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26)
@@ -392,7 +392,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
}
// verify nothing changed
- FieldMapper fieldMapper = stage1.mappers().getMapper("shape");
+ FieldMapper fieldMapper = docMapper.mappers().getMapper("shape");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
@@ -408,9 +408,9 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
.field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
- mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
- fieldMapper = stage1.mappers().getMapper("shape");
+ docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
+ fieldMapper = docMapper.mappers().getMapper("shape");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
@@ -25,7 +25,7 @@ import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
- import org.elasticsearch.index.mapper.DocumentMapperParser;
+ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -94,33 +94,16 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
String mappingWithIndexEnabled = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", true).endObject()
.endObject().endObject().string();
- DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser();
- DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(mappingWithIndexEnabled));
+ MapperService mapperService = createIndex("test", bwcSettings).mapperService();
+ DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(mappingWithIndexEnabled), true, false);
assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(true));
String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", false).endObject()
.endObject().endObject().string();
- DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(mappingWithIndexDisabled));
- mapperEnabled.merge(mapperDisabled.mapping(), false, false);
- assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
+ DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mappingWithIndexDisabled), false, false);
+ assertThat(merged.IndexFieldMapper().enabled(), is(false));
}
public void testThatDisablingWorksWhenMergingBackcompat() throws Exception {
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", bwcSettings).mapperService().documentMapperParser();
DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping));
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_index").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping));
enabledMapper.merge(disabledMapper.mapping(), false, false);
assertThat(enabledMapper.indexMapper().enabled(), is(false));
} }
public void testCustomSettingsBackcompat() throws Exception { public void testCustomSettingsBackcompat() throws Exception {
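As in the hunk above, the first merge for a type registers it and later calls fold updates into it, with each call returning an immutable snapshot. A sketch of that life cycle under the same harness assumptions (class and field names are illustrative; the boolean arguments mirror the call sites above):

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.MapperService;
    import org.elasticsearch.test.ESSingleNodeTestCase;

    public class RegisterThenUpdateSketch extends ESSingleNodeTestCase {
        public void testEarlierSnapshotsNeverChange() throws Exception {
            MapperService mapperService = createIndex("test").mapperService();
            DocumentMapper first = mapperService.merge("type",
                    new CompressedXContent("{\"type\":{\"properties\":{\"a\":{\"type\":\"string\"}}}}"), true, false);
            DocumentMapper second = mapperService.merge("type",
                    new CompressedXContent("{\"type\":{\"properties\":{\"b\":{\"type\":\"string\"}}}}"), false, false);
            // "second" holds the union of both updates; "first" is unchanged.
            assertNull(first.mappers().getMapper("b"));
            assertNotNull(second.mappers().getMapper("b"));
        }
    }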


@@ -187,15 +187,13 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
     String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_field_names").field("enabled", false).endObject()
             .endObject().endObject().string();
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-    DocumentMapper mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping));
-    DocumentMapper mapperDisabled = parser.parse("type", new CompressedXContent(disabledMapping));
-    mapperEnabled.merge(mapperDisabled.mapping(), false, false);
-    assertFalse(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
+    MapperService mapperService = createIndex("test").mapperService();
+    DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false);
+    DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false);
+    assertFalse(mapperDisabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
-    mapperEnabled = parser.parse("type", new CompressedXContent(enabledMapping));
-    mapperDisabled.merge(mapperEnabled.mapping(), false, false);
+    mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), false, false);
     assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
 }


@@ -59,6 +59,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
             .bytes());
     assertNotNull(doc.dynamicMappingsUpdate());
     client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
+    mapper = index.mapperService().documentMapper("type");
     writer.addDocument(doc.rootDoc());
     writer.addDocument(doc.rootDoc());
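The added line above is needed because mappers are no longer updated in place: a DocumentMapper reference taken before a mapping update goes stale and must be re-read from the MapperService. A sketch of the failure mode it guards against, assuming the same harness (class, index, type, and field names are illustrative):

    import org.elasticsearch.index.IndexService;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.test.ESSingleNodeTestCase;

    public class StaleMapperSketch extends ESSingleNodeTestCase {
        public void testReFetchMapperAfterUpdate() throws Exception {
            IndexService index = createIndex("test");
            client().admin().indices().preparePutMapping("test").setType("type")
                    .setSource("{\"type\":{\"properties\":{\"foo\":{\"type\":\"string\"}}}}").get();
            DocumentMapper before = index.mapperService().documentMapper("type");
            client().admin().indices().preparePutMapping("test").setType("type")
                    .setSource("{\"type\":{\"properties\":{\"bar\":{\"type\":\"string\"}}}}").get();
            // "before" is an immutable snapshot and never sees the second update.
            assertNull(before.mappers().getMapper("bar"));
            // Re-fetch to observe the merged mapping.
            DocumentMapper after = index.mapperService().documentMapper("type");
            assertNotNull(after.mappers().getMapper("bar"));
        }
    }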


@@ -59,15 +59,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
             .endObject().endObject().endObject().string();
     DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
-    stage1.merge(stage2.mapping(), true, false);
-    // since we are simulating, we should not have the age mapping
+    DocumentMapper merged = stage1.merge(stage2.mapping(), false);
+    // stage1 mapping should not have been modified
     assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
     assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
-    // now merge, don't simulate
-    stage1.merge(stage2.mapping(), false, false);
-    // but we have the age in
-    assertThat(stage1.mappers().smartNameFieldMapper("age"), notNullValue());
-    assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
+    // but merged should
+    assertThat(merged.mappers().smartNameFieldMapper("age"), notNullValue());
+    assertThat(merged.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
 }
 public void testMergeObjectDynamic() throws Exception {
@@ -80,8 +78,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
     DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping));
     assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
-    mapper.merge(withDynamicMapper.mapping(), false, false);
-    assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
+    DocumentMapper merged = mapper.merge(withDynamicMapper.mapping(), false);
+    assertThat(merged.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
 }
 public void testMergeObjectAndNested() throws Exception {
@@ -96,14 +94,14 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
     DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping));
     try {
-        objectMapper.merge(nestedMapper.mapping(), true, false);
+        objectMapper.merge(nestedMapper.mapping(), false);
         fail();
     } catch (IllegalArgumentException e) {
         assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from non-nested to nested"));
     }
     try {
-        nestedMapper.merge(objectMapper.mapping(), true, false);
+        nestedMapper.merge(objectMapper.mapping(), false);
         fail();
     } catch (IllegalArgumentException e) {
         assertThat(e.getMessage(), containsString("object mapping [obj] can't be changed from nested to non-nested"));
@@ -123,13 +121,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
     DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2));
     assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
-    existing.merge(changed.mapping(), false, false);
-    assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
+    DocumentMapper merged = existing.merge(changed.mapping(), false);
+    assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
 }
 public void testChangeSearchAnalyzerToDefault() throws Exception {
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+    MapperService mapperService = createIndex("test").mapperService();
     String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
             .endObject().endObject().string();
@@ -137,14 +135,13 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
             .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject()
             .endObject().endObject().string();
-    DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1));
-    DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2));
+    DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), true, false);
+    DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), false, false);
     assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
-    existing.merge(changed.mapping(), false, false);
-    assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
-    assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
+    assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
+    assertThat(((StringFieldMapper) (merged.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
 }
 public void testConcurrentMergeTest() throws Throwable {
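DocumentMapper.merge has lost its simulate flag: the method no longer mutates the receiver, so every call behaves like the old simulate pass until the caller publishes the returned mapper. A sketch of both uses, assuming the same harness (class, type, and field names are illustrative):

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.DocumentMapperParser;
    import org.elasticsearch.test.ESSingleNodeTestCase;

    public class ImmutableMergeSketch extends ESSingleNodeTestCase {
        public void testMergeLeavesReceiverUntouched() throws Exception {
            DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
            DocumentMapper m1 = parser.parse("type",
                    new CompressedXContent("{\"type\":{\"properties\":{\"a\":{\"type\":\"string\"}}}}"));
            DocumentMapper m2 = parser.parse("type",
                    new CompressedXContent("{\"type\":{\"properties\":{\"b\":{\"type\":\"string\"}}}}"));
            // Conflict check and apply are now the same call; the receiver is
            // never modified, only the returned mapper carries the union.
            DocumentMapper merged = m1.merge(m2.mapping(), false);
            assertNull(m1.mappers().getMapper("b"));
            assertNotNull(merged.mappers().getMapper("b"));
        }
    }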


@@ -150,19 +150,17 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
 public void testBuildThenParse() throws Exception {
     IndexService indexService = createIndex("test");
-    Settings settings = indexService.getIndexSettings().getSettings();
-    DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-    DocumentMapper builderDocMapper = doc(settings, rootObject("person").add(
+    DocumentMapper builderDocMapper = doc(rootObject("person").add(
             stringField("name").store(true)
                     .addMultiField(stringField("indexed").index(true).tokenized(true))
                     .addMultiField(stringField("not_indexed").index(false).store(true))
-    ), indexService.mapperService()).build(indexService.mapperService(), mapperParser);
+    ), indexService.mapperService()).build(indexService.mapperService());
     String builtMapping = builderDocMapper.mappingSource().string();
     // System.out.println(builtMapping);
     // reparse it
-    DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(builtMapping));
+    DocumentMapper docMapper = indexService.mapperService().documentMapperParser().parse("person", new CompressedXContent(builtMapping));
     BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));


@@ -25,7 +25,6 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -41,9 +40,9 @@ import static org.hamcrest.Matchers.nullValue;
 public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
 public void testMergeMultiField() throws Exception {
     String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json");
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-    DocumentMapper docMapper = parser.parse("person", new CompressedXContent(mapping));
+    MapperService mapperService = createIndex("test").mapperService();
+    DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false);
     assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
     assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue());
@@ -56,11 +55,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
     assertThat(f, nullValue());
     mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
-    DocumentMapper docMapper2 = parser.parse("person", new CompressedXContent(mapping));
-    docMapper.merge(docMapper2.mapping(), true, false);
-    docMapper.merge(docMapper2.mapping(), false, false);
+    docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false);
     assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -77,11 +72,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
     assertThat(f, notNullValue());
     mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
-    DocumentMapper docMapper3 = parser.parse("person", new CompressedXContent(mapping));
-    docMapper.merge(docMapper3.mapping(), true, false);
-    docMapper.merge(docMapper3.mapping(), false, false);
+    docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false);
     assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -92,11 +83,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
     assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue());
     mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json");
-    DocumentMapper docMapper4 = parser.parse("person", new CompressedXContent(mapping));
-    docMapper.merge(docMapper4.mapping(), true, false);
-    docMapper.merge(docMapper4.mapping(), false, false);
+    docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false);
     assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -125,7 +112,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
     mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
-    mapperService.merge("person", new CompressedXContent(mapping), false, false);
+    docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false);
     assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -142,7 +129,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
     assertThat(f, notNullValue());
     mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
-    mapperService.merge("person", new CompressedXContent(mapping), false, false);
+    docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false);
     assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
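Each hunk above collapses the old parse/simulate/apply triple into a single call. Sketched side by side, assuming the mapperService, parser, and docMapper variables from the test above:

    // Before this commit: three steps, the last of which mutated docMapper.
    //     DocumentMapper docMapper2 = parser.parse("person", new CompressedXContent(mapping));
    //     docMapper.merge(docMapper2.mapping(), true, false);   // simulate only
    //     docMapper.merge(docMapper2.mapping(), false, false);  // apply
    // After: one step, no mutation; the return value is the new current mapper.
    docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false);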


@@ -74,6 +74,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
     assertNotNull(doc.dynamicMappingsUpdate());
     client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
+    defaultMapper = index.mapperService().documentMapper("type");
     FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
     assertThat(mapper, instanceOf(LongFieldMapper.class));
@@ -98,6 +99,7 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
     assertNotNull(doc.dynamicMappingsUpdate());
     assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get());
+    defaultMapper = index.mapperService().documentMapper("type");
     FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
     assertThat(mapper, instanceOf(StringFieldMapper.class));


@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.simple;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -48,12 +47,10 @@ import static org.hamcrest.Matchers.equalTo;
 public class SimpleMapperTests extends ESSingleNodeTestCase {
 public void testSimpleMapper() throws Exception {
     IndexService indexService = createIndex("test");
-    Settings settings = indexService.getIndexSettings().getSettings();
-    DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-    DocumentMapper docMapper = doc(settings,
+    DocumentMapper docMapper = doc(
             rootObject("person")
                     .add(object("name").add(stringField("first").store(true).index(false))),
-            indexService.mapperService()).build(indexService.mapperService(), mapperParser);
+            indexService.mapperService()).build(indexService.mapperService());
     BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
     Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
@@ -110,12 +107,10 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 public void testNoDocumentSent() throws Exception {
     IndexService indexService = createIndex("test");
-    Settings settings = indexService.getIndexSettings().getSettings();
-    DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-    DocumentMapper docMapper = doc(settings,
+    DocumentMapper docMapper = doc(
             rootObject("person")
                     .add(object("name").add(stringField("first").store(true).index(false))),
-            indexService.mapperService()).build(indexService.mapperService(), mapperParser);
+            indexService.mapperService()).build(indexService.mapperService());
     BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
     try {


@@ -196,10 +196,10 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
     DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
     docMapper = parser.parse("type", docMapper.mappingSource());
     if (conflicts.length == 0) {
-        docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
+        docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false);
     } else {
         try {
-            docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
+            docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false);
             fail();
         } catch (IllegalArgumentException e) {
             for (String conflict : conflicts) {
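With merge free of side effects, a conflict check is just a merge whose result is discarded; on conflict it throws IllegalArgumentException. A self-contained sketch, assuming the same harness (class, type, and field names are illustrative):

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.DocumentMapperParser;
    import org.elasticsearch.test.ESSingleNodeTestCase;

    public class ConflictCheckSketch extends ESSingleNodeTestCase {
        public void testConflictingMergeThrows() throws Exception {
            DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
            DocumentMapper current = parser.parse("type",
                    new CompressedXContent("{\"type\":{\"properties\":{\"foo\":{\"type\":\"long\"}}}}"));
            DocumentMapper update = parser.parse("type",
                    new CompressedXContent("{\"type\":{\"properties\":{\"foo\":{\"type\":\"double\"}}}}"));
            try {
                current.merge(update.mapping(), false); // result intentionally ignored
                fail("expected a mapping conflict");
            } catch (IllegalArgumentException e) {
                // expected: the long -> double type change is rejected
            }
        }
    }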


@@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
@@ -478,7 +479,8 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
             .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
             .endObject().endObject().string();
-    DocumentMapper defaultMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false);
+    MapperService mapperService = indexService.mapperService();
+    DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false);
     ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
             .startObject()
@@ -492,7 +494,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
     String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
             .endObject().endObject().endObject().endObject().string();
-    defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), false, false);
+    defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false);
     doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
             .startObject()
@@ -507,7 +509,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
             .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
             .endObject().endObject().endObject().endObject().string();
     try {
-        defaultMapper.merge(parser.parse("type", new CompressedXContent(updatedMapping)).mapping(), true, false);
+        mapperService.merge("type", new CompressedXContent(updatedMapping), false, false);
         fail();
     } catch (IllegalArgumentException e) {
         assertThat(e.getMessage(), containsString("different [omit_norms]"));


@@ -143,17 +143,16 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
     String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_timestamp").field("enabled", true).endObject()
             .endObject().endObject().string();
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-    DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping));
+    MapperService mapperService = createIndex("test").mapperService();
+    DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false);
     String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_timestamp").field("enabled", false).endObject()
             .endObject().endObject().string();
-    DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping));
-    enabledMapper.merge(disabledMapper.mapping(), false, false);
-    assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false));
+    DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false);
+    assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true));
+    assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false));
 }
 // issue 3174
@@ -504,16 +503,16 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
             .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject()
             .endObject().endObject().string();
     Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-    DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser();
-    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
+    MapperService mapperService = createIndex("test", indexSettings).mapperService();
+    DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false);
     assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
     assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values"));
     mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
             .endObject().endObject().string();
-    docMapper.merge(parser.parse("type", new CompressedXContent(mapping)).mapping(), false, false);
+    docMapper = mapperService.merge("type", new CompressedXContent(mapping), false, false);
     assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER));
     assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array"));
 }
@@ -571,8 +570,8 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
             .startObject("fielddata").field("format", "array").endObject()
             .field("store", "no")
             .field("index", "no")
-            .field("path", "bar")
-            .field("default", "1970-01-02")
+            .field("path", "foo")
+            .field("default", "1970-01-01")
             .endObject()
             .endObject().endObject().string();
@@ -584,6 +583,24 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
         assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values"));
     }
+    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+            .startObject("_timestamp").field("enabled", false)
+            .startObject("fielddata").field("format", "array").endObject()
+            .field("store", "yes")
+            .field("index", "analyzed")
+            .field("path", "bar")
+            .field("default", "1970-01-02")
+            .endObject()
+            .endObject().endObject().string();
+    try {
+        mapperService.merge("type", new CompressedXContent(mapping), false, false);
+        fail();
+    } catch (IllegalArgumentException e) {
+        assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value"));
+        assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value"));
+    }
     assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
     assertTrue(docMapper.timestampFieldMapper().enabled());
@@ -650,7 +667,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
 public void testBackcompatMergePaths() throws Exception {
     String[] possiblePathValues = {"some_path", "anotherPath", null};
-    DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser();
+    MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService();
     XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject()
             .startObject("type")
             .startObject("_timestamp");
@@ -670,21 +687,17 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
     mapping2.endObject()
             .endObject().endObject();
-    assertConflict(mapping1.string(), mapping2.string(), parser, (path1 == path2 ? null : "Cannot update path in _timestamp value"));
+    assertConflict(mapperService, "type", mapping1.string(), mapping2.string(), (path1 == path2 ? null : "Cannot update path in _timestamp value"));
 }
-void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException {
-    DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
-    docMapper = parser.parse("type", docMapper.mappingSource());
-    if (conflict == null) {
-        docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
-    } else {
-        try {
-            docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), true, false);
-            fail();
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString(conflict));
-        }
+void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException {
+    mapperService.merge("type", new CompressedXContent(mapping1), true, false);
+    try {
+        mapperService.merge("type", new CompressedXContent(mapping2), false, false);
+        assertNull(conflict);
+    } catch (IllegalArgumentException e) {
+        assertNotNull(conflict);
+        assertThat(e.getMessage(), containsString(conflict));
     }
 }
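The rewritten assertConflict helper encodes the expectation in whether conflict is null: a successful merge trips assertNull when a conflict was expected, and a thrown IllegalArgumentException trips assertNotNull when none was. The same pattern restated generically, as a hypothetical helper using plain JUnit plus Hamcrest:

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.containsString;
    import static org.junit.Assert.assertNotNull;
    import static org.junit.Assert.assertNull;

    static void assertConflict(Runnable merge, String expectedConflict) {
        try {
            merge.run();
            // Merge succeeded: only valid when no conflict was expected.
            assertNull(expectedConflict);
        } catch (IllegalArgumentException e) {
            // Merge failed: only valid when a conflict was expected.
            assertNotNull(expectedConflict);
            assertThat(e.getMessage(), containsString(expectedConflict));
        }
    }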


@@ -33,8 +33,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
@@ -111,13 +111,12 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
             .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
             .endObject().endObject().string();
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-    DocumentMapper mapperWithoutTtl = parser.parse("type", new CompressedXContent(mappingWithoutTtl));
-    DocumentMapper mapperWithTtl = parser.parse("type", new CompressedXContent(mappingWithTtl));
-    mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false);
-    assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true));
+    MapperService mapperService = createIndex("test").mapperService();
+    DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), true, false);
+    DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), false, false);
+    assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false));
+    assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true));
 }
 public void testThatChangingTTLKeepsMapperEnabled() throws Exception {
@@ -135,24 +134,22 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
             .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
             .endObject().endObject().string();
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-    DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl));
-    DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(updatedMapping));
-    initialMapper.merge(updatedMapper.mapping(), true, false);
+    MapperService mapperService = createIndex("test").mapperService();
+    DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false);
+    DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false);
     assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
+    assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true));
 }
 public void testThatDisablingTTLReportsConflict() throws Exception {
     String mappingWithTtl = getMappingWithTtlEnabled().string();
     String mappingWithTtlDisabled = getMappingWithTtlDisabled().string();
-    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-    DocumentMapper initialMapper = parser.parse("type", new CompressedXContent(mappingWithTtl));
-    DocumentMapper updatedMapper = parser.parse("type", new CompressedXContent(mappingWithTtlDisabled));
+    MapperService mapperService = createIndex("test").mapperService();
+    DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false);
     try {
-        initialMapper.merge(updatedMapper.mapping(), true, false);
+        mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), false, false);
         fail();
     } catch (IllegalArgumentException e) {
         // expected
@@ -190,20 +187,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
     IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
     XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false);
+    indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), randomBoolean(), false);
 }
 public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
     IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
     XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false);
+    indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), randomBoolean(), false);
 }
 public void testMergeWithOnlyDefaultSet() throws Exception {
     XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
     IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
     XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
+    indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false);
     CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
     assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
 }
@@ -214,65 +211,11 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
     CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
     assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
     XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
+    indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false);
     CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
     assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
 }
-public void testThatSimulatedMergingLeavesStateUntouched() throws Exception {
-    //check if default ttl changed when simulate set to true
-    XContentBuilder mappingWithTtl = getMappingWithTtlEnabled("6d");
-    IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl);
-    CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false);
-    // make sure simulate flag actually worked - no mappings applied
-    CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));
-    client().admin().indices().prepareDelete("testindex").get();
-    // check if enabled changed when simulate set to true
-    XContentBuilder mappingWithoutTtl = getMappingWithTtlDisabled();
-    indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
-    mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
-    // make sure simulate flag actually worked - no mappings applied
-    mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));
-    client().admin().indices().prepareDelete("testindex").get();
-    // check if enabled changed when simulate set to true
-    mappingWithoutTtl = getMappingWithTtlDisabled("6d");
-    indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
-    mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
-    // make sure simulate flag actually worked - no mappings applied
-    mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));
-    client().admin().indices().prepareDelete("testindex").get();
-    // check if switching simulate flag off works
-    mappingWithoutTtl = getMappingWithTtlDisabled("6d");
-    indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
-    mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
-    // make sure simulate flag actually worked - mappings applied
-    mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
-    client().admin().indices().prepareDelete("testindex").get();
-    // check if switching simulate flag off works if nothing was applied in the beginning
-    indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
-    mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
-    // make sure simulate flag actually worked - mappings applied
-    mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-    assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
-}
 public void testIncludeInObjectBackcompat() throws Exception {
     String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_ttl").field("enabled", true).endObject()


@@ -76,7 +76,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
 private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
     IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
     // simulate like in MetaDataMappingService#putMapping
-    indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false);
+    indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), false, false);
     // make sure mappings applied
     CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
     assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string()));
@@ -99,7 +99,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
     CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
     // simulate like in MetaDataMappingService#putMapping
     try {
-        indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false);
+        indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), true, false);
         fail();
     } catch (IllegalArgumentException e) {
         // expected
@@ -123,14 +123,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
         mapperService.merge("type", new CompressedXContent(update.string()), false, false);
         fail();
     } catch (IllegalArgumentException e) {
-        assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
+        assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]"));
     }
     try {
         mapperService.merge("type", new CompressedXContent(update.string()), false, false);
         fail();
     } catch (IllegalArgumentException e) {
-        assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
+        assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]"));
     }
     assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper);
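Note the two identical merge attempts above: both must fail with the same message precisely because the first failed merge leaves the registered mapping untouched. Sketched as a loop, assuming the mapperService and update variables from the test above:

    // Both iterations fail identically; a failed merge has no side effects.
    for (int i = 0; i < 2; i++) {
        try {
            mapperService.merge("type", new CompressedXContent(update.string()), false, false);
            fail();
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(),
                    containsString("mapper [foo] of different type, current_type [long], merged_type [double]"));
        }
    }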


@@ -149,7 +149,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
             .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet();
         fail("Expected MergeMappingException");
     } catch (IllegalArgumentException e) {
-        assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [string] to [int]"));
+        assertThat(e.getMessage(), containsString("mapper [body] of different type, current_type [string], merged_type [integer]"));
     }
 }
 }


@@ -2,7 +2,7 @@
 "type": {
     "_all": {
         "store": false,
-        "enabled": false,
+        "enabled": true,
         "store_term_vectors": false,
         "store_term_vector_offsets": false,
         "store_term_vector_positions": false,


@@ -134,14 +134,13 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
     String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_size").field("enabled", true).endObject()
             .endObject().endObject().string();
-    DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(enabledMapping));
+    DocumentMapper enabledMapper = indexService.mapperService().merge("type", new CompressedXContent(enabledMapping), true, false);
     String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_size").field("enabled", false).endObject()
             .endObject().endObject().string();
-    DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(disabledMapping));
-    enabledMapper.merge(disabledMapper.mapping(), false, false);
-    assertThat(enabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false));
+    DocumentMapper disabledMapper = indexService.mapperService().merge("type", new CompressedXContent(disabledMapping), false, false);
+    assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false));
 }
 }