Mappings: Remove dead code after previous refactorings

Most of this removes code that handled the deletion of types, a feature
that was itself removed in #8877.

closes #10666
Ryan Ernst 2015-04-19 15:45:46 -07:00
parent 24d1f595a5
commit 3a04d3ca91
15 changed files with 73 additions and 164 deletions
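
A note on the pattern that repeats through the files below: call sites stop going through the intermediate FieldMappers collection (mappers().smartName(field).mapper()) and resolve directly to a single mapper via mappers().smartNameFieldMapper(field), which returns null when nothing matches. A minimal sketch of the call-shape change, using hypothetical stand-in classes rather than the real Elasticsearch types:

import java.util.HashMap;
import java.util.Map;

class SmartNameSketch {
    // Stand-in for FieldMapper; the real interface carries far more state.
    static class FieldMapper {
        final String indexName;
        FieldMapper(String indexName) { this.indexName = indexName; }
    }

    // Stand-in for DocumentFieldMappers, keyed by full name only for brevity.
    static class DocumentFieldMappers {
        private final Map<String, FieldMapper> byFullName = new HashMap<>();
        void put(String fullName, FieldMapper mapper) { byFullName.put(fullName, mapper); }

        // New-style entry point: one hop to the mapper, or null if absent.
        FieldMapper smartNameFieldMapper(String name) { return byFullName.get(name); }
    }

    public static void main(String[] args) {
        DocumentFieldMappers mappers = new DocumentFieldMappers();
        mappers.put("field1", new FieldMapper("field1"));

        // Before: mappers().smartName("field1").mapper()    (two hops via FieldMappers)
        // After:  mappers().smartNameFieldMapper("field1")  (one hop, null when missing)
        FieldMapper mapper = mappers.smartNameFieldMapper("field1");
        System.out.println(mapper != null ? mapper.indexName : "<missing>");
    }
}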

View File

@@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.mapping.get;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
import org.elasticsearch.action.support.ActionFilters;
@@ -187,7 +188,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO
} else if (Regex.isSimpleMatchPattern(field)) {
// go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name.
// also make sure we only store each mapper once.
Collection<FieldMapper<?>> remainingFieldMappers = new LinkedList<>(allFieldMappers);
Collection<FieldMapper<?>> remainingFieldMappers = Lists.newLinkedList(allFieldMappers);
for (Iterator<FieldMapper<?>> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper<?> fieldMapper = it.next();
if (Regex.simpleMatch(field, fieldMapper.names().fullName())) {
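
The three-pass resolution the comment above describes can be shown in isolation. The sketch below is a simplification under stated assumptions: simpleMatch() is a minimal stand-in for Regex.simpleMatch (a single '*' wildcard), each mapper is reduced to its three names, and matched mappers are removed from the remaining set so that full-name matches take precedence and nothing is stored twice:

import java.util.*;

class ResolveOrderSketch {
    // Minimal stand-in for Regex.simpleMatch: supports a single '*' wildcard.
    static boolean simpleMatch(String pattern, String value) {
        int star = pattern.indexOf('*');
        if (star == -1) {
            return pattern.equals(value);
        }
        return value.startsWith(pattern.substring(0, star))
                && value.endsWith(pattern.substring(star + 1));
    }

    public static void main(String[] args) {
        // mapper id -> [fullName, indexName, name] (hypothetical field mappers)
        Map<String, String[]> remaining = new LinkedHashMap<>();
        remaining.put("m1", new String[]{"obj.title", "obj.title", "title"});
        remaining.put("m2", new String[]{"obj.body", "body_idx", "body"});

        String pattern = "obj.*";
        List<String> matched = new ArrayList<>();
        // pass 0 = full name, pass 1 = index name, pass 2 = name
        for (int pass = 0; pass < 3; pass++) {
            for (Iterator<Map.Entry<String, String[]>> it = remaining.entrySet().iterator(); it.hasNext(); ) {
                Map.Entry<String, String[]> entry = it.next();
                if (simpleMatch(pattern, entry.getValue()[pass])) {
                    matched.add(entry.getKey());
                    it.remove(); // each mapper is stored at most once
                }
            }
        }
        System.out.println(matched); // [m1, m2]
    }
}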

View File

@@ -148,9 +148,9 @@ public class TransportMoreLikeThisAction extends HandledTransportAction<MoreLike
final Set<String> fields = newHashSet();
if (request.fields() != null) {
for (String field : request.fields()) {
FieldMappers fieldMappers = docMapper.mappers().smartName(field);
if (fieldMappers != null) {
fields.add(fieldMappers.mapper().names().indexName());
FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field);
if (fieldMapper != null) {
fields.add(fieldMapper.names().indexName());
} else {
fields.add(field);
}

View File

@@ -363,13 +363,13 @@ public class ShardGetService extends AbstractIndexShardComponent {
SearchLookup searchLookup = null;
for (String field : gFields) {
Object value = null;
FieldMappers fieldMapper = docMapper.mappers().smartName(field);
FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field);
if (fieldMapper == null) {
if (docMapper.objectMappers().get(field) != null) {
// Only fail if we know it is an object field, missing paths / fields shouldn't fail.
throw new ElasticsearchIllegalArgumentException("field [" + field + "] isn't a leaf field");
}
} else if (!fieldMapper.mapper().fieldType().stored() && !fieldMapper.mapper().isGenerated()) {
} else if (!fieldMapper.fieldType().stored() && !fieldMapper.isGenerated()) {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, fieldDataService, new String[]{type});
LeafSearchLookup leafSearchLookup = searchLookup.getLeafSearchLookup(docIdAndVersion.context);
@@ -380,7 +380,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
List<Object> values = searchLookup.source().extractRawValues(field);
if (!values.isEmpty()) {
for (int i = 0; i < values.size(); i++) {
values.set(i, fieldMapper.mapper().valueForSearch(values.get(i)));
values.set(i, fieldMapper.valueForSearch(values.get(i)));
}
value = values;
}
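
The branch above distinguishes stored fields from fields that must be rebuilt from _source: when a field is neither stored nor generated, its raw values are extracted from the document source and each one is passed through valueForSearch() before being returned. A simplified sketch of that fallback, with a plain map standing in for the SearchLookup source and a function standing in for the mapper's valueForSearch():

import java.util.*;
import java.util.function.Function;

class SourceFallbackSketch {
    static Object valueFor(String field, boolean stored, Object storedValue,
                           Map<String, List<Object>> source,
                           Function<Object, Object> valueForSearch) {
        if (stored) {
            return storedValue; // cheap path: the value was stored with the document
        }
        // fallback path: extract raw values from _source, then convert each one
        List<Object> values = source.getOrDefault(field, Collections.emptyList());
        List<Object> converted = new ArrayList<>(values.size());
        for (Object v : values) {
            converted.add(valueForSearch.apply(v));
        }
        return converted.isEmpty() ? null : converted;
    }

    public static void main(String[] args) {
        Map<String, List<Object>> source = Map.of("ts", List.of((Object) 1429483546000L));
        // hypothetical mapper behavior: render epoch millis as a date string
        Object value = valueFor("ts", false, null, source, o -> new Date((Long) o).toString());
        System.out.println(value);
    }
}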

View File

@@ -21,21 +21,20 @@ package org.elasticsearch.index.mapper;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.ForwardingSet;
import com.google.common.collect.Maps;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*
*/
public final class DocumentFieldMappers extends ForwardingSet<FieldMapper<?>> {
public final class DocumentFieldMappers implements Iterable<FieldMapper<?>> {
private final FieldMappersLookup fieldMappers;
@@ -104,7 +103,7 @@ public final class DocumentFieldMappers extends ForwardingSet<FieldMapper<?>> {
* Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}, and last
* by {@link #name(String)}.
*/
public FieldMappers smartName(String name) {
FieldMappers smartName(String name) {
return fieldMappers.smartName(name);
}
@@ -140,8 +139,7 @@ public final class DocumentFieldMappers extends ForwardingSet<FieldMapper<?>> {
return this.searchQuoteAnalyzer;
}
@Override
protected Set<FieldMapper<?>> delegate() {
return fieldMappers;
public Iterator<FieldMapper<?>> iterator() {
return fieldMappers.iterator();
}
}

View File

@@ -489,14 +489,14 @@ public class DocumentMapper implements ToXContent {
// lock to avoid concurrency issues with mapping updates coming from the API
synchronized(this) {
// simulate on the first time to check if the mapping update is applicable
MergeContext mergeContext = newMmergeContext(new MergeFlags().simulate(true));
MergeContext mergeContext = newMergeContext(new MergeFlags().simulate(true));
rootObjectMapper.merge(update, mergeContext);
if (mergeContext.hasConflicts()) {
throw new MapperParsingException("Could not apply generated dynamic mappings: " + Arrays.toString(mergeContext.buildConflicts()));
} else {
// then apply it for real
mappingsModified = true;
mergeContext = newMmergeContext(new MergeFlags().simulate(false));
mergeContext = newMergeContext(new MergeFlags().simulate(false));
rootObjectMapper.merge(update, mergeContext);
}
}
@@ -665,7 +665,7 @@ public class DocumentMapper implements ToXContent {
rootObjectMapper.traverse(listener);
}
private MergeContext newMmergeContext(MergeFlags mergeFlags) {
private MergeContext newMergeContext(MergeFlags mergeFlags) {
return new MergeContext(mergeFlags) {
List<String> conflicts = new ArrayList<>();
@@ -699,7 +699,7 @@ public class DocumentMapper implements ToXContent {
}
public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
final MergeContext mergeContext = newMmergeContext(mergeFlags);
final MergeContext mergeContext = newMergeContext(mergeFlags);
assert rootMappers.size() == mergeWith.rootMappers.size();
rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
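
Both call sites above use the same two-phase merge: run the merge once with simulate(true) purely to collect conflicts, and only when none are found run it again with simulate(false) to mutate state. A self-contained sketch of that pattern, with hypothetical simplified types in place of MergeContext and MergeFlags:

import java.util.*;
import java.util.function.BiConsumer;

class SimulateThenApplySketch {
    static class MergeContext {
        final boolean simulate;
        final List<String> conflicts = new ArrayList<>();
        MergeContext(boolean simulate) { this.simulate = simulate; }
    }

    static void merge(Map<String, String> current, Map<String, String> update,
                      BiConsumer<Map.Entry<String, String>, MergeContext> mergeOne) {
        // phase 1: simulate only, collecting conflicts without mutating anything
        MergeContext simulateCtx = new MergeContext(true);
        update.entrySet().forEach(e -> mergeOne.accept(e, simulateCtx));
        if (!simulateCtx.conflicts.isEmpty()) {
            throw new IllegalArgumentException("Could not apply mappings: " + simulateCtx.conflicts);
        }
        // phase 2: no conflicts found, apply the same merge for real
        MergeContext applyCtx = new MergeContext(false);
        update.entrySet().forEach(e -> mergeOne.accept(e, applyCtx));
    }

    public static void main(String[] args) {
        Map<String, String> current = new HashMap<>(Map.of("field", "string"));
        merge(current, Map.of("other", "long"), (entry, ctx) -> {
            String existing = current.get(entry.getKey());
            if (existing != null && !existing.equals(entry.getValue())) {
                ctx.conflicts.add("mapper [" + entry.getKey() + "] has different type");
            } else if (!ctx.simulate) {
                current.put(entry.getKey(), entry.getValue());
            }
        });
        System.out.println(current); // now contains field=string and other=long
    }
}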

View File

@@ -19,21 +19,20 @@
package org.elasticsearch.index.mapper;
import com.google.common.collect.ForwardingSet;
import com.google.common.collect.Lists;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.collect.CopyOnWriteHashSet;
import org.elasticsearch.common.regex.Regex;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* A class that holds a map of field mappers from name, index name, and full name.
*/
public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
class FieldMappersLookup implements Iterable<FieldMapper<?>> {
private static CopyOnWriteHashMap<String, FieldMappers> add(CopyOnWriteHashMap<String, FieldMappers> map, String key, FieldMapper<?> mapper) {
FieldMappers mappers = map.get(key);
@@ -45,72 +44,36 @@ public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
return map.copyAndPut(key, mappers);
}
private static CopyOnWriteHashMap<String, FieldMappers> remove(CopyOnWriteHashMap<String, FieldMappers> map, String key, FieldMapper<?> mapper) {
FieldMappers mappers = map.get(key);
if (mappers == null) {
return map;
}
mappers = mappers.remove(mapper);
if (mappers.isEmpty()) {
return map.copyAndRemove(key);
} else {
return map.copyAndPut(key, mappers);
}
}
private static class MappersLookup {
final CopyOnWriteHashMap<String, FieldMappers> name, indexName, fullName;
final CopyOnWriteHashMap<String, FieldMappers> indexName, fullName;
MappersLookup(CopyOnWriteHashMap<String, FieldMappers> name, CopyOnWriteHashMap<String,
FieldMappers> indexName, CopyOnWriteHashMap<String, FieldMappers> fullName) {
this.name = name;
MappersLookup(CopyOnWriteHashMap<String, FieldMappers> indexName, CopyOnWriteHashMap<String, FieldMappers> fullName) {
this.indexName = indexName;
this.fullName = fullName;
}
MappersLookup addNewMappers(Iterable<? extends FieldMapper<?>> mappers) {
CopyOnWriteHashMap<String, FieldMappers> name = this.name;
CopyOnWriteHashMap<String, FieldMappers> indexName = this.indexName;
CopyOnWriteHashMap<String, FieldMappers> fullName = this.fullName;
for (FieldMapper<?> mapper : mappers) {
name = add(name, mapper.names().name(), mapper);
indexName = add(indexName, mapper.names().indexName(), mapper);
fullName = add(fullName, mapper.names().fullName(), mapper);
}
return new MappersLookup(name, indexName, fullName);
return new MappersLookup(indexName, fullName);
}
MappersLookup removeMappers(Iterable<?> mappers) {
CopyOnWriteHashMap<String, FieldMappers> name = this.name;
CopyOnWriteHashMap<String, FieldMappers> indexName = this.indexName;
CopyOnWriteHashMap<String, FieldMappers> fullName = this.fullName;
for (Object o : mappers) {
if (!(o instanceof FieldMapper)) {
continue;
}
FieldMapper<?> mapper = (FieldMapper<?>) o;
name = remove(name, mapper.names().name(), mapper);
indexName = remove(indexName, mapper.names().indexName(), mapper);
fullName = remove(fullName, mapper.names().fullName(), mapper);
}
return new MappersLookup(name, indexName, fullName);
}
}
private final CopyOnWriteHashSet<FieldMapper<?>> mappers;
private final MappersLookup lookup;
/** Create a new empty instance. */
public FieldMappersLookup() {
this(new CopyOnWriteHashSet<FieldMapper<?>>(),
new MappersLookup(new CopyOnWriteHashMap<String, FieldMappers>(),
new CopyOnWriteHashMap<String, FieldMappers>(),
this(new MappersLookup(new CopyOnWriteHashMap<String, FieldMappers>(),
new CopyOnWriteHashMap<String, FieldMappers>()));
}
private FieldMappersLookup(CopyOnWriteHashSet<FieldMapper<?>> mappers, MappersLookup lookup) {
this.mappers = mappers;
private FieldMappersLookup(MappersLookup lookup) {
this.lookup = lookup;
}
@@ -118,19 +81,7 @@ public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
* Return a new instance that contains the union of this instance and the provided mappers.
*/
public FieldMappersLookup copyAndAddAll(Collection<? extends FieldMapper<?>> newMappers) {
return new FieldMappersLookup(mappers.copyAndAddAll(newMappers), lookup.addNewMappers(newMappers));
}
/**
* Return a new instance that contains this instance minus the provided mappers.
*/
public FieldMappersLookup copyAndRemoveAll(Collection<?> mappersToRemove) {
final CopyOnWriteHashSet<FieldMapper<?>> newMappers = mappers.copyAndRemoveAll(mappersToRemove);
if (newMappers != mappers) {
return new FieldMappersLookup(newMappers, lookup.removeMappers(mappersToRemove));
} else {
return this;
}
return new FieldMappersLookup(lookup.addNewMappers(newMappers));
}
/**
@@ -152,7 +103,7 @@ public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
*/
public List<String> simpleMatchToIndexNames(String pattern) {
List<String> fields = Lists.newArrayList();
for (FieldMapper<?> fieldMapper : mappers) {
for (FieldMapper<?> fieldMapper : this) {
if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) {
fields.add(fieldMapper.names().indexName());
} else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) {
@@ -167,7 +118,7 @@ public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
*/
public List<String> simpleMatchToFullName(String pattern) {
List<String> fields = Lists.newArrayList();
for (FieldMapper<?> fieldMapper : mappers) {
for (FieldMapper<?> fieldMapper : this) {
if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) {
fields.add(fieldMapper.names().fullName());
} else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) {
@@ -181,7 +132,7 @@ public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
* Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}.
*/
@Nullable
public FieldMappers smartName(String name) {
FieldMappers smartName(String name) {
FieldMappers fieldMappers = fullName(name);
if (fieldMappers != null) {
return fieldMappers;
@@ -202,8 +153,28 @@ public class FieldMappersLookup extends ForwardingSet<FieldMapper<?>> {
return fieldMappers.mapper();
}
public Iterator<FieldMapper<?>> iterator() {
final Iterator<FieldMappers> fieldsItr = lookup.fullName.values().iterator();
if (fieldsItr.hasNext() == false) {
return Collections.emptyIterator();
}
return new Iterator<FieldMapper<?>>() {
Iterator<FieldMapper> fieldValuesItr = fieldsItr.next().iterator();
@Override
protected Set<FieldMapper<?>> delegate() {
return mappers;
public boolean hasNext() {
return fieldsItr.hasNext() || fieldValuesItr.hasNext();
}
@Override
public FieldMapper next() {
if (fieldValuesItr.hasNext() == false && fieldsItr.hasNext()) {
fieldValuesItr = fieldsItr.next().iterator();
}
return fieldValuesItr.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("cannot remove field mapper from lookup");
}
};
}
}
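
The iterator added above flattens a map whose values are themselves collections (one FieldMappers per full name) into a single stream of mappers by chaining the per-value iterators. The same pattern in generic form, as a sketch:

import java.util.*;

class FlatteningIteratorSketch {
    static <T> Iterator<T> flatten(Collection<? extends Iterable<T>> groups) {
        final Iterator<? extends Iterable<T>> groupsItr = groups.iterator();
        if (groupsItr.hasNext() == false) {
            return Collections.emptyIterator();
        }
        return new Iterator<T>() {
            Iterator<T> valuesItr = groupsItr.next().iterator();

            @Override
            public boolean hasNext() {
                return groupsItr.hasNext() || valuesItr.hasNext();
            }

            @Override
            public T next() {
                // advance to the next group once the current one is exhausted
                if (valuesItr.hasNext() == false && groupsItr.hasNext()) {
                    valuesItr = groupsItr.next().iterator();
                }
                return valuesItr.next();
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    public static void main(String[] args) {
        List<List<String>> groups = List.of(List.of("a", "b"), List.of("c"));
        flatten(groups).forEachRemaining(System.out::println); // prints a, b, c
    }
}

Like the original, this assumes no value collection is ever empty, which holds in the lookup because mappers are only ever added to it after this change.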

View File

@@ -91,7 +91,7 @@ public class MapperService extends AbstractIndexComponent {
public static final String DEFAULT_MAPPING = "_default_";
private static ObjectOpenHashSet<String> META_FIELDS = ObjectOpenHashSet.from(
"_uid", "_id", "_type", "_all", "_analyzer", "_parent", "_routing", "_index",
"_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
"_size", "_timestamp", "_ttl"
);
private final AnalysisService analysisService;
@@ -105,7 +105,6 @@ public class MapperService extends AbstractIndexComponent {
private volatile String defaultMappingSource;
private volatile String defaultPercolatorMappingSource;
private volatile Map<String, DocumentMapper> mappers = ImmutableMap.of();
private final Object typeMutex = new Object();
@@ -395,42 +394,6 @@ public class MapperService extends AbstractIndexComponent {
}
}
public void remove(String type) {
synchronized (typeMutex) {
DocumentMapper docMapper = mappers.get(type);
if (docMapper == null) {
return;
}
docMapper.close();
mappers = newMapBuilder(mappers).remove(type).map();
removeObjectAndFieldMappers(docMapper);
for (DocumentTypeListener typeListener : typeListeners) {
typeListener.afterRemove(docMapper);
}
}
}
private void removeObjectAndFieldMappers(DocumentMapper docMapper) {
synchronized (mappersMutex) {
fieldMappers = fieldMappers.copyAndRemoveAll(docMapper.mappers());
ImmutableOpenMap.Builder<String, ObjectMappers> fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers);
for (ObjectMapper mapper : docMapper.objectMappers().values()) {
ObjectMappers mappers = fullPathObjectMappers.get(mapper.fullPath());
if (mappers != null) {
mappers = mappers.remove(mapper);
if (mappers.isEmpty()) {
fullPathObjectMappers.remove(mapper.fullPath());
} else {
fullPathObjectMappers.put(mapper.fullPath(), mappers);
}
}
}
this.fullPathObjectMappers = fullPathObjectMappers.build();
}
}
public DocumentMapper parse(String mappingType, CompressedString mappingSource, boolean applyDefault) throws MapperParsingException {
String defaultMappingSource;
if (PercolatorService.TYPE_NAME.equals(mappingType)) {
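
The deleted removal paths above all share one copy-on-write shape: look up the group for a key, shrink it, then either drop the key entirely or store the smaller group, always producing a new map rather than mutating the old one. A sketch of that shape with plain JDK collections standing in for CopyOnWriteHashMap and ImmutableOpenMap:

import java.util.*;

class CopyOnWriteRemoveSketch {
    // Returns a NEW map; the input map is never mutated.
    static <K, V> Map<K, Set<V>> remove(Map<K, Set<V>> map, K key, V value) {
        Set<V> group = map.get(key);
        if (group == null || !group.contains(value)) {
            return map; // nothing to remove: reuse the existing map
        }
        Map<K, Set<V>> copy = new HashMap<>(map);
        Set<V> smaller = new HashSet<>(group);
        smaller.remove(value);
        if (smaller.isEmpty()) {
            copy.remove(key);       // last entry for this key: drop the key itself
        } else {
            copy.put(key, smaller); // otherwise store the shrunken group
        }
        return Collections.unmodifiableMap(copy);
    }

    public static void main(String[] args) {
        Map<String, Set<String>> mappers = new HashMap<>();
        mappers.put("point", new HashSet<>(Set.of("point", "point.geohash")));
        Map<String, Set<String>> updated = remove(mappers, "point", "point.geohash");
        System.out.println(updated); // {point=[point]}
    }
}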

View File

@@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
@@ -357,16 +356,6 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
this.copyTo = copyTo;
}
@Nullable
protected String defaultPostingFormat() {
return null;
}
@Nullable
protected String defaultDocValuesFormat() {
return null;
}
protected boolean defaultDocValues() {
if (indexCreatedBefore2x) {
return Defaults.PRE_2X_DOC_VALUES;

View File

@@ -129,11 +129,6 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements Internal
return new FieldDataType("string");
}
@Override
protected String defaultPostingFormat() {
return "default";
}
@Override
public void preParse(ParseContext context) throws IOException {
// if we have the id provided, fill it, and parse now

View File

@@ -386,14 +386,6 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
typesToRefresh.toArray(new String[typesToRefresh.size()]), event.state().nodes().localNodeId())
);
}
// go over and remove mappings
for (DocumentMapper documentMapper : mapperService.docMappers(true)) {
if (seenMappings.containsKey(new Tuple<>(index, documentMapper.type())) && !indexMetaData.mappings().containsKey(documentMapper.type())) {
// we have it in our mappings, but not in the metadata, and we have seen it in the cluster state, remove it
mapperService.remove(documentMapper.type());
seenMappings.remove(new Tuple<>(index, documentMapper.type()));
}
}
} catch (Throwable t) {
// if we failed the mappings anywhere, we need to fail the shards for this index, note, we safeguard
// by processing the mappings on the master, or on the node the mapping was introduced on,
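
The loop being deleted above reconciled local state against cluster metadata: any type held locally and previously seen in the cluster state, but no longer present in the index metadata, was removed. A standalone sketch of that reconciliation, with plain sets standing in for mapperService, seenMappings, and indexMetaData.mappings():

import java.util.*;

class ReconcileMappingsSketch {
    public static void main(String[] args) {
        Set<String> localTypes = new HashSet<>(Set.of("user", "event", "stale"));  // mapperService
        Set<String> seenInClusterState = new HashSet<>(Set.of("user", "stale"));   // seenMappings
        Set<String> metadataTypes = Set.of("user", "event");                       // indexMetaData.mappings()

        for (Iterator<String> it = localTypes.iterator(); it.hasNext(); ) {
            String type = it.next();
            // we have it locally, we have seen it in the cluster state,
            // but the metadata no longer lists it: remove it
            if (seenInClusterState.contains(type) && !metadataTypes.contains(type)) {
                it.remove();
                seenInClusterState.remove(type);
            }
        }
        System.out.println(localTypes); // now contains only user and event
    }
}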

View File

@@ -67,12 +67,12 @@ public class TokenCountFieldMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(true));
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartName("tc").mapper()).analyzer(), equalTo("keyword"));
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Merged for real this time, so the analyzer has been updated
assertThat(((TokenCountFieldMapper) stage1.mappers().smartName("tc").mapper()).analyzer(), equalTo("standard"));
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
}
@Test

View File

@@ -53,11 +53,11 @@ public class SimpleDynamicTemplatesTests extends ElasticsearchSingleNodeTest {
DocumentFieldMappers mappers = docMapper.mappers();
assertThat(mappers.smartName("s"), Matchers.notNullValue());
assertEquals(IndexOptions.NONE, mappers.smartName("s").mapper().fieldType().indexOptions());
assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue());
assertEquals(IndexOptions.NONE, mappers.smartNameFieldMapper("s").fieldType().indexOptions());
assertThat(mappers.smartName("l"), Matchers.notNullValue());
assertNotSame(IndexOptions.NONE, mappers.smartName("l").mapper().fieldType().indexOptions());
assertThat(mappers.smartNameFieldMapper("l"), Matchers.notNullValue());
assertNotSame(IndexOptions.NONE, mappers.smartNameFieldMapper("l").fieldType().indexOptions());
}

View File

@@ -99,7 +99,7 @@ public class GeohashMappingGeoPointTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash_precision", 10).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
FieldMapper mapper = defaultMapper.mappers().smartName("point").mapper();
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
assertThat(mapper, instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper;
assertThat(geoPointFieldMapper.geoHashPrecision(), is(10));
@@ -111,7 +111,7 @@ public class GeohashMappingGeoPointTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash_precision", "5m").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
FieldMapper mapper = defaultMapper.mappers().smartName("point").mapper();
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
assertThat(mapper, instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper;
assertThat(geoPointFieldMapper.geoHashPrecision(), is(10));

View File

@@ -64,25 +64,25 @@ public class DoubleIndexingDocTest extends ElasticsearchSingleNodeTest {
IndexReader reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(mapper.mappers().smartName("field1").mapper().termQuery("value1", null), 10);
TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").termQuery("value1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field2").mapper().termQuery("1", null), 10);
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").termQuery("1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field3").mapper().termQuery("1.1", null), 10);
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").termQuery("1.1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field4").mapper().termQuery("2010-01-01", null), 10);
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").termQuery("2010-01-01", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().termQuery("1", null), 10);
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").termQuery("1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().termQuery("2", null), 10);
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").termQuery("2", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().termQuery("3", null), 10);
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").termQuery("3", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
writer.close();
reader.close();

View File

@@ -54,15 +54,15 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(true));
assertThat(mergeResult.hasConflicts(), equalTo(false));
// since we are simulating, we should not have the age mapping
assertThat(stage1.mappers().smartName("age"), nullValue());
assertThat(stage1.mappers().smartName("obj1.prop1"), nullValue());
assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
// now merge, don't simulate
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
// there are still no merge failures
assertThat(mergeResult.hasConflicts(), equalTo(false));
// but now we have the age mapping
assertThat(stage1.mappers().smartName("age"), notNullValue());
assertThat(stage1.mappers().smartName("obj1.prop1"), notNullValue());
assertThat(stage1.mappers().smartNameFieldMapper("age"), notNullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
}
@Test