Fixed an issue where the parentTypes set could be updated by adding or removing a parent type while a refresh was in progress, which could have led to concurrency issues.

Martijn van Groningen 2014-01-03 16:37:03 +01:00
parent 911ef6a058
commit 2cb5cfecec
1 changed file with 15 additions and 17 deletions


@@ -47,7 +47,6 @@ import org.elasticsearch.index.shard.service.IndexShard;
 import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.atomic.AtomicReference;
 
 /**
  *
@@ -56,7 +55,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
 
     private final boolean reuse;
     private final ConcurrentMap<Object, SimpleIdReaderCache> idReaders;
-    private final AtomicReference<NavigableSet<HashedBytesArray>> parentTypesHolder;
+    private final NavigableSet<HashedBytesArray> parentTypes;
 
     IndexService indexService;
@@ -65,7 +64,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
         super(index, indexSettings);
         reuse = componentSettings.getAsBoolean("reuse", false);
         idReaders = ConcurrentCollections.newConcurrentMap();
-        parentTypesHolder = new AtomicReference<NavigableSet<HashedBytesArray>>(new TreeSet<HashedBytesArray>(UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder));
+        parentTypes = new TreeSet<HashedBytesArray>(UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder);
     }
 
     @Override
@@ -123,7 +122,6 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                 // do the refresh
                 Map<Object, Map<String, TypeBuilder>> builders = new HashMap<Object, Map<String, TypeBuilder>>();
                 Map<Object, IndexReader> cacheToReader = new HashMap<Object, IndexReader>();
-                NavigableSet<HashedBytesArray> parentTypes = this.parentTypesHolder.get();
 
                 // first, go over and load all the id->doc map for all types
                 for (AtomicReaderContext context : atomicReaderContexts) {
@@ -305,25 +303,25 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
 
         @Override
         public void beforeCreate(DocumentMapper mapper) {
-            NavigableSet<HashedBytesArray> parentTypes = parentTypesHolder.get();
-            ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
-            if (parentFieldMapper.active()) {
-                // A _parent field can never be added to an existing mapping, so a _parent field either exists on
-                // a new created or doesn't exists. This is why we can update the known parent types via DocumentTypeListener
-                if (parentTypes.add(new HashedBytesArray(Strings.toUTF8Bytes(parentFieldMapper.type(), new BytesRef())))) {
-                    parentTypesHolder.set(parentTypes);
-                    clear();
+            synchronized (idReaders) {
+                ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
+                if (parentFieldMapper.active()) {
+                    // A _parent field can never be added to an existing mapping, so a _parent field either exists on
+                    // a new created or doesn't exists. This is why we can update the known parent types via DocumentTypeListener
+                    if (parentTypes.add(new HashedBytesArray(Strings.toUTF8Bytes(parentFieldMapper.type(), new BytesRef())))) {
+                        clear();
+                    }
                 }
             }
         }
 
         @Override
         public void afterRemove(DocumentMapper mapper) {
-            NavigableSet<HashedBytesArray> parentTypes = parentTypesHolder.get();
-            ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
-            if (parentFieldMapper.active()) {
-                parentTypes.remove(new HashedBytesArray(Strings.toUTF8Bytes(parentFieldMapper.type(), new BytesRef())));
-                parentTypesHolder.set(parentTypes);
+            synchronized (idReaders) {
+                ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
+                if (parentFieldMapper.active()) {
+                    parentTypes.remove(new HashedBytesArray(Strings.toUTF8Bytes(parentFieldMapper.type(), new BytesRef())));
+                }
             }
         }
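
For context, a minimal standalone sketch of the locking pattern this change moves to: the parent types live in a plain TreeSet and every read and write is guarded by one shared monitor, instead of being published through an AtomicReference whose contents can still be mutated while a refresh iterates them. The names below (ParentTypesRegistry, addParentType, removeParentType, snapshotForRefresh, lock) are illustrative only and do not appear in the Elasticsearch source; in SimpleIdCache the existing idReaders map serves as the monitor.

import java.util.NavigableSet;
import java.util.TreeSet;

// Illustrative sketch only, not the SimpleIdCache implementation.
class ParentTypesRegistry {

    // Shared monitor; in SimpleIdCache the idReaders map plays this role.
    private final Object lock = new Object();
    private final NavigableSet<String> parentTypes = new TreeSet<String>();

    // Mapping with a _parent field added (compare beforeCreate above): mutate under the lock.
    boolean addParentType(String type) {
        synchronized (lock) {
            return parentTypes.add(type);
        }
    }

    // Mapping removed (compare afterRemove above): mutate under the same lock.
    void removeParentType(String type) {
        synchronized (lock) {
            parentTypes.remove(type);
        }
    }

    // A refresh copies the set under the same lock, so listener updates can never
    // interleave with the iteration that rebuilds the id caches.
    NavigableSet<String> snapshotForRefresh() {
        synchronized (lock) {
            return new TreeSet<String>(parentTypes);
        }
    }
}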