Improve concurrency control in filter caching: reuse the existing cached map if one was already created by a concurrent request.

This commit is contained in:
kimchy 2010-12-22 10:42:38 +02:00
parent d283c158d2
commit 33339ae4b1
2 changed files with 27 additions and 20 deletions

View File

@ -126,7 +126,10 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
ConcurrentMap<Filter, DocSet> cachedFilters = cache.cache.get(reader.getFieldCacheKey());
if (cachedFilters == null) {
cachedFilters = cache.buildFilterMap();
cache.cache.putIfAbsent(reader.getFieldCacheKey(), cachedFilters);
ConcurrentMap<Filter, DocSet> prev = cache.cache.putIfAbsent(reader.getFieldCacheKey(), cachedFilters);
if (prev != null) {
cachedFilters = prev;
}
}
DocSet docSet = cachedFilters.get(filter);
if (docSet != null) {
@ -134,8 +137,11 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
}
DocIdSet docIdSet = filter.getDocIdSet(reader);
docSet = cacheable(reader, docIdSet);
cachedFilters.putIfAbsent(filter, docSet);
return docIdSet;
DocSet prev = cachedFilters.putIfAbsent(filter, docSet);
if (prev != null) {
docSet = prev;
}
return docSet;
}
public String toString() {

View File

@ -127,28 +127,23 @@ public abstract class AbstractDoubleConcurrentMapFilterCache extends AbstractInd
ConcurrentMap<Filter, DocSet> cachedFilters = cache.cache.get(reader.getFieldCacheKey());
if (cachedFilters == null) {
cachedFilters = cache.buildCacheMap();
cache.cache.putIfAbsent(reader.getFieldCacheKey(), cachedFilters);
ConcurrentMap<Filter, DocSet> prev = cache.cache.putIfAbsent(reader.getFieldCacheKey(), cachedFilters);
if (prev != null) {
cachedFilters = prev;
}
}
DocSet docSet = cachedFilters.get(filter);
if (docSet != null) {
return docSet;
}
// check if it's in the weak cache; if so, move it from weak to soft
ConcurrentMap<Filter, DocSet> weakCachedFilters = cache.weakCache.get(reader.getFieldCacheKey());
if (weakCachedFilters != null) {
docSet = weakCachedFilters.get(filter);
if (docSet != null) {
cachedFilters.put(filter, docSet);
weakCachedFilters.remove(filter);
return docSet;
}
}
DocIdSet docIdSet = filter.getDocIdSet(reader);
docSet = cacheable(reader, docIdSet);
cachedFilters.putIfAbsent(filter, docSet);
return docIdSet;
DocSet prev = cachedFilters.putIfAbsent(filter, docSet);
if (prev != null) {
docSet = prev;
}
return docSet;
}
public String toString() {
@ -191,7 +186,10 @@ public abstract class AbstractDoubleConcurrentMapFilterCache extends AbstractInd
ConcurrentMap<Filter, DocSet> weakCacheFilters = cache.weakCache.get(reader.getFieldCacheKey());
if (weakCacheFilters == null) {
weakCacheFilters = cache.buildWeakCacheMap();
cache.weakCache.putIfAbsent(reader.getFieldCacheKey(), weakCacheFilters);
ConcurrentMap<Filter, DocSet> prev = cache.weakCache.putIfAbsent(reader.getFieldCacheKey(), weakCacheFilters);
if (prev != null) {
weakCacheFilters = prev;
}
}
docSet = weakCacheFilters.get(filter);
@ -201,8 +199,11 @@ public abstract class AbstractDoubleConcurrentMapFilterCache extends AbstractInd
DocIdSet docIdSet = filter.getDocIdSet(reader);
docSet = cacheable(reader, docIdSet);
weakCacheFilters.putIfAbsent(filter, docSet);
return docIdSet;
DocSet prev = weakCacheFilters.putIfAbsent(filter, docSet);
if (prev != null) {
docSet = prev;
}
return docSet;
}
public String toString() {