Add FieldMappers before adding Mappers (used for parsing), and while holding the relevant ObjectMapper's mutex

Adding the mappers first could cause the wrong FieldMappers to be used during object parsing.

Also: removed MergeContext.newFieldMappers, MergeContext.newObjectMappers and relatives, as they are no longer used. Similarly removed MergeContext.addFieldDataChange and fieldMapperChanges, as they were unused.

Closes #3844
commit a10c195667
parent a03013ea25
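The ordering the commit message describes can be pictured with a minimal, self-contained sketch. This is not the Elasticsearch code; `MergeOrderSketch`, its two maps, and its mutex are hypothetical stand-ins for DocumentMapper's name-to-FieldMapper lookup and an ObjectMapper's sub-mapper map, which parsing threads read without taking a lock. The point is only the order the commit enforces: register the FieldMapper first, under the owning object's mutex, and only then expose the new sub-mapper to parsers.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative stand-in types only; not the Elasticsearch implementation.
public class MergeOrderSketch {

    // plays the role of DocumentMapper's name -> FieldMapper lookup used while parsing
    private static final Map<String, String> fieldMappers = new ConcurrentHashMap<String, String>();

    // plays the role of an ObjectMapper's sub-mapper map, read by parsers with no lock
    private static final Map<String, String> subMappers = new ConcurrentHashMap<String, String>();

    private static final Object mutex = new Object(); // the owning object's mutex

    // merge path: add the FieldMapper first, and only then publish the sub-mapper
    static void addSubMapper(String name, String fieldMapper) {
        synchronized (mutex) {
            fieldMappers.put(name, fieldMapper); // 1) the parsing lookup is complete first
            subMappers.put(name, fieldMapper);   // 2) now parsing threads may see the mapper
        }
    }

    // parse path (lock-free read): if the sub-mapper is visible, its FieldMapper already exists
    static String resolveForParsing(String name) {
        if (subMappers.containsKey(name)) {
            String fm = fieldMappers.get(name);
            // with the reverse ordering, fm could still be null here, so parsing
            // would fall back to the wrong FieldMapper for the field
            return fm != null ? fm : "wrong-fallback-mapper";
        }
        return "unmapped";
    }
}

With the old ordering (sub-mapper published before its FieldMapper is registered), a concurrent parse could see the new mapper while the field-mapper lookup was still incomplete, which is the failure the commit message attributes to #3844.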
@@ -572,7 +572,7 @@ public class DocumentMapper implements ToXContent {
         addFieldMappers(fieldMappers.toArray(new FieldMapper[fieldMappers.size()]));
     }

-    private void addFieldMappers(FieldMapper... fieldMappers) {
+    public void addFieldMappers(FieldMapper... fieldMappers) {
         synchronized (mappersMutex) {
             this.fieldMappers = this.fieldMappers.concat(this, fieldMappers);
         }
@@ -644,12 +644,6 @@ public class DocumentMapper implements ToXContent {
         }

         if (!mergeFlags.simulate()) {
-            if (!mergeContext.newFieldMappers().mappers.isEmpty()) {
-                addFieldMappers(mergeContext.newFieldMappers().mappers);
-            }
-            if (!mergeContext.newObjectMappers().mappers.isEmpty()) {
-                addObjectMappers(mergeContext.newObjectMappers().mappers);
-            }
             // let the merge with attributes to override the attributes
             meta = mergeWith.meta();
             // update the source of the merged one
@@ -21,8 +21,6 @@ package org.elasticsearch.index.mapper;

 import com.google.common.collect.Lists;

-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;

 /**
@@ -34,11 +32,6 @@ public class MergeContext {
     private final DocumentMapper.MergeFlags mergeFlags;
     private final List<String> mergeConflicts = Lists.newArrayList();

-    private final FieldMapperListener.Aggregator newFieldMappers = new FieldMapperListener.Aggregator();
-    private final ObjectMapperListener.Aggregator newObjectMappers = new ObjectMapperListener.Aggregator();
-
-    private List<FieldMapper> fieldDataChanges = null;
-
     public MergeContext(DocumentMapper documentMapper, DocumentMapper.MergeFlags mergeFlags) {
         this.documentMapper = documentMapper;
         this.mergeFlags = mergeFlags;
@@ -52,14 +45,6 @@ public class MergeContext {
         return mergeFlags;
     }

-    public FieldMapperListener.Aggregator newFieldMappers() {
-        return newFieldMappers;
-    }
-
-    public ObjectMapperListener.Aggregator newObjectMappers() {
-        return newObjectMappers;
-    }
-
     public void addConflict(String mergeFailure) {
         mergeConflicts.add(mergeFailure);
     }
@@ -71,18 +56,4 @@ public class MergeContext {
     public String[] buildConflicts() {
         return mergeConflicts.toArray(new String[mergeConflicts.size()]);
     }
-
-    public void addFieldDataChange(FieldMapper mapper) {
-        if (fieldDataChanges == null) {
-            fieldDataChanges = new ArrayList<FieldMapper>();
-        }
-        fieldDataChanges.add(mapper);
-    }
-
-    public List<FieldMapper> fieldMapperChanges() {
-        if (fieldDataChanges == null) {
-            return Collections.emptyList();
-        }
-        return fieldDataChanges;
-    }
 }
@@ -578,7 +578,6 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
                 this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
                         ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
                 );
-                mergeContext.addFieldDataChange(this);
             }
         }
     }
@@ -21,17 +21,14 @@ package org.elasticsearch.index.mapper.multifield;

 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;

 import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.TreeMap;
+import java.util.*;

 import static com.google.common.collect.Lists.newArrayList;
 import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
@@ -227,18 +224,25 @@ public class MultiFieldMapper implements Mapper, AllFieldMapper.IncludeInAll {
                 // its a single field mapper, upgraded into a multi field mapper, just update the default mapper
                 if (defaultMapper == null) {
                     if (!mergeContext.mergeFlags().simulate()) {
-                        defaultMapper = mergeWith;
-                        mergeContext.newFieldMappers().mappers.add((FieldMapper) defaultMapper);
+                        mergeContext.docMapper().addFieldMappers((FieldMapper) defaultMapper);
+                        defaultMapper = mergeWith; // only set & expose it after adding fieldmapper
                     }
                 }
             } else {
                 MultiFieldMapper mergeWithMultiField = (MultiFieldMapper) mergeWith;
+
+                List<FieldMapper> newFieldMappers = null;
+                MapBuilder<String, Mapper> newMappersBuilder = null;
+                Mapper newDefaultMapper = null;

                 // merge the default mapper
                 if (defaultMapper == null) {
                     if (mergeWithMultiField.defaultMapper != null) {
                         if (!mergeContext.mergeFlags().simulate()) {
-                            defaultMapper = mergeWithMultiField.defaultMapper;
-                            mergeContext.newFieldMappers().mappers.add((FieldMapper) defaultMapper);
+                            if (newFieldMappers == null) {
+                                newFieldMappers = new ArrayList<FieldMapper>();
+                            }
+                            newFieldMappers.add((FieldMapper) defaultMapper);
+                            newDefaultMapper = mergeWithMultiField.defaultMapper;
                         }
                     }
                 } else {
@@ -257,15 +261,33 @@ public class MultiFieldMapper implements Mapper, AllFieldMapper.IncludeInAll {
                             if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
                                 ((AllFieldMapper.IncludeInAll) mergeWithMapper).includeInAll(false);
                             }
-                            mappers = newMapBuilder(mappers).put(mergeWithMapper.name(), mergeWithMapper).immutableMap();
+                            if (newMappersBuilder == null) {
+                                newMappersBuilder = newMapBuilder(mappers);
+                            }
+                            newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
                             if (mergeWithMapper instanceof AbstractFieldMapper) {
-                                mergeContext.newFieldMappers().mappers.add((FieldMapper) mergeWithMapper);
+                                if (newFieldMappers == null) {
+                                    newFieldMappers = new ArrayList<FieldMapper>();
+                                }
+                                newFieldMappers.add((FieldMapper) mergeWithMapper);
                             }
                         }
                     } else {
                         mergeIntoMapper.merge(mergeWithMapper, mergeContext);
                     }
                 }
+
+                // first add all field mappers
+                if (newFieldMappers != null && !newFieldMappers.isEmpty()) {
+                    mergeContext.docMapper().addFieldMappers(newFieldMappers);
+                }
+                // now publish mappers
+                if (newDefaultMapper != null) {
+                    defaultMapper = newDefaultMapper;
+                }
+                if (newMappersBuilder != null) {
+                    mappers = newMappersBuilder.immutableMap();
+                }
             }
         }
     }
@@ -831,25 +831,30 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {

         doMerge(mergeWithObject, mergeContext);

-        List<Mapper> mappersToTraverse = new ArrayList<Mapper>();
+        List<Mapper> mappersToPut = new ArrayList<Mapper>();
+        FieldMapperListener.Aggregator newFieldMappers = new FieldMapperListener.Aggregator();
+        ObjectMapperListener.Aggregator newObjectMappers = new ObjectMapperListener.Aggregator();
         synchronized (mutex) {
             for (Mapper mergeWithMapper : mergeWithObject.mappers.values()) {
                 Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
                 if (mergeIntoMapper == null) {
                     // no mapping, simply add it if not simulating
                     if (!mergeContext.mergeFlags().simulate()) {
-                        putMapper(mergeWithMapper);
-                        mappersToTraverse.add(mergeWithMapper);
+                        mappersToPut.add(mergeWithMapper);
+                        mergeWithMapper.traverse(newFieldMappers);
+                        mergeWithMapper.traverse(newObjectMappers);
                     }
                 } else {
                     if ((mergeWithMapper instanceof MultiFieldMapper) && !(mergeIntoMapper instanceof MultiFieldMapper)) {
                         MultiFieldMapper mergeWithMultiField = (MultiFieldMapper) mergeWithMapper;
                         mergeWithMultiField.merge(mergeIntoMapper, mergeContext);
                         if (!mergeContext.mergeFlags().simulate()) {
-                            putMapper(mergeWithMultiField);
+                            mappersToPut.add(mergeWithMultiField);
                             // now, record mappers to traverse events for all mappers
                             // we don't just traverse mergeWithMultiField as we already have the default handler
                             for (Mapper mapper : mergeWithMultiField.mappers().values()) {
-                                mappersToTraverse.add(mapper);
+                                mapper.traverse(newFieldMappers);
+                                mapper.traverse(newObjectMappers);
                             }
                         }
                     } else {
@@ -857,12 +862,18 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
                     }
                 }
             }
+            if (!newFieldMappers.mappers.isEmpty()) {
+                mergeContext.docMapper().addFieldMappers(newFieldMappers.mappers);
+            }
+            if (!newObjectMappers.mappers.isEmpty()) {
+                mergeContext.docMapper().addObjectMappers(newObjectMappers.mappers);
+            }
+            // and the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock)
+            for (Mapper mapper : mappersToPut) {
+                putMapper(mapper);
+            }
         }
-        // call this outside of the mutex
-        for (Mapper mapper : mappersToTraverse) {
-            mapper.traverse(mergeContext.newFieldMappers());
-            mapper.traverse(mergeContext.newObjectMappers());
-        }
-
     }

     protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {
@@ -20,10 +20,8 @@
 package org.elasticsearch.indices.mapping;

+import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.index.IndexResponse;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.ImmutableSettings;
-import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.test.AbstractIntegrationTest;
@@ -43,34 +41,33 @@ public class ConcurrentDynamicTemplateTests extends AbstractIntegrationTest {
     private final String mappingType = "test-mapping";

     @Test // see #3544
+    @AwaitsFix(bugUrl="Boaz is looking into this test")
     public void testConcurrentDynamicMapping() throws Exception {
-        final String mapping = "{" + mappingType + ": {" + "\"properties\": {" + "\"an_id\": {"
-                + "\"type\": \"string\"," + "\"store\": \"yes\"," + "\"index\": \"not_analyzed\"" + "}" + "}," + "\"dynamic_templates\": ["
-                + "{" + "\"participants\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": \"yes\","
-                + "\"index\": \"analyzed\"," + "\"analyzer\": \"whitespace\"" + "}" + "}" + "}" + "]" + "}" + "}";
-        // The 'fieldNames' array is used to help with retrieval of index terms
-        // after testing
-
-        final String fieldName = "participants.ACCEPTED";
+        final String fieldName = "field";
+        final String mapping = "{ \"" + mappingType + "\": {" +
+                "\"dynamic_templates\": ["
+                + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": \"yes\","
+                + "\"index\": \"analyzed\", \"analyzer\": \"whitespace\" } } } ] } }";
         int iters = atLeast(5);
         for (int i = 0; i < iters; i++) {
             wipeIndex("test");
             client().admin().indices().prepareCreate("test")
-                    .setSettings(
-                            ImmutableSettings.settingsBuilder()
-                                    .put("number_of_shards", between(1,5))
-                                    .put("number_of_replicas", between(0,1)).build())
-                    .addMapping(mappingType, mapping).execute().actionGet();
+                    .setSettings(
+                            ImmutableSettings.settingsBuilder()
+                                    .put("number_of_shards", between(1, 5))
+                                    .put("number_of_replicas", between(0, 1)).build())
+                    .addMapping(mappingType, mapping).execute().actionGet();
             ensureYellow();
             int numDocs = atLeast(10);
             final CountDownLatch latch = new CountDownLatch(numDocs);
             final List<Throwable> throwable = new CopyOnWriteArrayList<Throwable>();
+            int currentID = 0;
             for (int j = 0; j < numDocs; j++) {
                 Map<String, Object> source = new HashMap<String, Object>();
-                source.put("an_id", Strings.randomBase64UUID(getRandom()));
                 source.put(fieldName, "test-user");
-                client().prepareIndex("test", mappingType).setSource(source).execute(new ActionListener<IndexResponse>() {
+                client().prepareIndex("test", mappingType, Integer.toString(currentID++)).setSource(source).execute(new ActionListener<IndexResponse>() {
                     @Override
                     public void onResponse(IndexResponse response) {
                         latch.countDown();