no need for weak keys since readers map gets explicitly cleared

kimchy 2011-05-22 07:35:22 +03:00
parent 6f80e306d6
commit 32294a4d4f
6 changed files with 7 additions and 21 deletions
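The change is the same in every file below: a reader-keyed cache map built with weak keys is replaced by a plain concurrent map, because entries for closed readers are removed explicitly instead of being left for the garbage collector to notice. A minimal before/after sketch (Value is a placeholder type, not an Elasticsearch class):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Before/after in one place. The weak-keyed variant relied on the IndexReader
// key becoming unreachable; the plain map relies on the explicit clearing the
// commit message refers to.
class CacheConstructionSketch<Value> {

    // old: ConcurrentMap<Object, Value> cache = new MapMaker().weakKeys().makeMap();
    // new:
    ConcurrentMap<Object, Value> cache = new ConcurrentHashMap<Object, Value>();
}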

View File

@@ -30,7 +30,6 @@ import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.common.Unicode;
 import org.elasticsearch.common.bloom.BloomFilter;
 import org.elasticsearch.common.bloom.BloomFilterFactory;
-import org.elasticsearch.common.collect.MapMaker;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.SizeUnit;
@@ -64,10 +63,7 @@ public class SimpleBloomCache extends AbstractIndexComponent implements BloomCac
         this.threadPool = threadPool;
         this.maxSize = indexSettings.getAsSize("index.cache.bloom.max_size", new SizeValue(500, SizeUnit.MEGA)).singles();
-        // weak keys is fine, it will only be cleared once IndexReader references will be removed
-        // (assuming clear(...) will not be called)
-        this.cache = new MapMaker().weakKeys().makeMap();
+        this.cache = ConcurrentCollections.newConcurrentMap();
     }
     @Override public void close() throws ElasticSearchException {
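The replacement map in SimpleBloomCache comes from a ConcurrentCollections.newConcurrentMap() helper whose source is not part of this diff; presumably it is a thin static factory along these lines (a sketch of its assumed shape, not the actual Elasticsearch class):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Assumed shape of the ConcurrentCollections helper used above.
public final class ConcurrentCollections {

    private ConcurrentCollections() {
    }

    // Centralizes the choice of concurrent map implementation and saves
    // callers from spelling out the generic parameters.
    public static <K, V> ConcurrentMap<K, V> newConcurrentMap() {
        return new ConcurrentHashMap<K, V>();
    }
}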

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.cache.field.data.support;
 import org.apache.lucene.index.IndexReader;
 import org.elasticsearch.ElasticSearchException;
-import org.elasticsearch.common.collect.MapMaker;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.index.AbstractIndexComponent;
@@ -32,6 +31,7 @@ import org.elasticsearch.index.field.data.FieldDataType;
 import org.elasticsearch.index.settings.IndexSettings;
 import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 /**
@@ -47,7 +47,7 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC
         super(index, indexSettings);
-        // weak keys is fine, it will only be cleared once IndexReader references will be removed
-        // (assuming clear(...) will not be called)
-        this.cache = new MapMaker().weakKeys().makeMap();
+        this.cache = new ConcurrentHashMap<Object, ConcurrentMap<String, FieldData>>();
     }
     @Override public void close() throws ElasticSearchException {
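The field data cache keeps a two-level structure: reader key on the outside, field name to FieldData on the inside, which is why the new field type spells out ConcurrentMap<String, FieldData> as the value. A rough sketch of that layout with a stubbed FieldData and a hypothetical clear(readerKey) standing in for the explicit eviction (the real class's lazy creation and clearing code is not shown in this diff):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Two-level layout: reader cache key -> (field name -> FieldData).
public class FieldDataCacheSketch {

    static class FieldData { /* per-field values, omitted */ }

    private final ConcurrentMap<Object, ConcurrentMap<String, FieldData>> cache =
            new ConcurrentHashMap<Object, ConcurrentMap<String, FieldData>>();

    FieldData get(Object readerKey, String field) {
        ConcurrentMap<String, FieldData> perReader = cache.get(readerKey);
        if (perReader == null) {
            ConcurrentMap<String, FieldData> created = new ConcurrentHashMap<String, FieldData>();
            perReader = cache.putIfAbsent(readerKey, created);
            if (perReader == null) {
                perReader = created;
            }
        }
        return perReader.get(field);
    }

    // Dropping the reader's entry explicitly is what makes weak keys on the
    // outer map unnecessary.
    void clear(Object readerKey) {
        cache.remove(readerKey);
    }
}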

View File

@@ -67,11 +67,6 @@ public class SoftFilterCache extends AbstractConcurrentMapFilterCache implements
         super.close();
     }
-    @Override protected ConcurrentMap<Object, ReaderValue> buildCache() {
-        MapMaker mapMaker = new MapMaker().weakKeys();
-        return mapMaker.makeMap();
-    }
     @Override protected ConcurrentMap<Filter, DocSet> buildFilterMap() {
         // DocSet are not really stored with strong reference only when searching on them...
         // Filter might be stored in query cache

View File

@@ -25,7 +25,6 @@ import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.OpenBitSet;
 import org.elasticsearch.common.RamUsage;
-import org.elasticsearch.common.collect.MapMaker;
 import org.elasticsearch.common.lab.LongsLAB;
 import org.elasticsearch.common.lucene.docset.DocSet;
 import org.elasticsearch.common.lucene.docset.DocSets;
@@ -40,6 +39,7 @@ import org.elasticsearch.index.cache.filter.FilterCache;
 import org.elasticsearch.index.settings.IndexSettings;
 import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.*;
@@ -82,7 +82,7 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
     }
     protected ConcurrentMap<Object, ReaderValue> buildCache() {
-        return new MapMaker().weakKeys().makeMap();
+        return new ConcurrentHashMap<Object, ReaderValue>();
     }
     protected ConcurrentMap<Filter, DocSet> buildFilterMap() {
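buildCache() in this base class is the single place the reader-keyed map is created, so once it returns a plain ConcurrentHashMap the weak-key overrides in SoftFilterCache (above) and WeakFilterCache (below) become redundant and are deleted. A sketch of that split, with ReaderValue, Filter and DocSet stubbed out:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Template-method split: the base class owns the reader map, subclasses only
// decide how strongly the per-reader DocSets are referenced.
abstract class FilterCacheSketch {

    static class ReaderValue { }
    static class Filter { }
    static class DocSet { }

    // Reader map: entries are removed explicitly when a reader is closed,
    // so no weak keys are needed.
    protected ConcurrentMap<Object, ReaderValue> buildCache() {
        return new ConcurrentHashMap<Object, ReaderValue>();
    }

    // Per-reader Filter -> DocSet map; see the subclass sketch after the
    // WeakFilterCache diff below.
    protected abstract ConcurrentMap<Filter, DocSet> buildFilterMap();
}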

View File

@@ -67,11 +67,6 @@ public class WeakFilterCache extends AbstractConcurrentMapFilterCache implements
         super.close();
     }
-    @Override protected ConcurrentMap<Object, ReaderValue> buildCache() {
-        MapMaker mapMaker = new MapMaker().weakKeys();
-        return mapMaker.makeMap();
-    }
     @Override protected ConcurrentMap<Filter, DocSet> buildFilterMap() {
         MapMaker mapMaker = new MapMaker().weakValues();
         if (maxSize != -1) {
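What the subclasses keep overriding is only buildFilterMap(): the reference strength of the cached DocSets still differs between the soft and weak variants. A sketch of the weak-valued version, with Filter and DocSet stubbed and the maxSize branch visible above left out:

import java.util.concurrent.ConcurrentMap;

import org.elasticsearch.common.collect.MapMaker;

// Weak values let DocSets that are only referenced by the cache be collected;
// SoftFilterCache presumably does the analogous thing with soft values.
class WeakValuedFilterMapSketch {

    static class Filter { }
    static class DocSet { }

    ConcurrentMap<Filter, DocSet> buildFilterMap() {
        return new MapMaker().weakValues().makeMap();
    }
}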

View File

@@ -27,11 +27,11 @@ import org.apache.lucene.util.StringHelper;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.common.BytesWrap;
 import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.collect.MapMaker;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
 import org.elasticsearch.common.trove.impl.Constants;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.id.IdCache;
@@ -56,7 +56,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, In
     @Inject public SimpleIdCache(Index index, @IndexSettings Settings indexSettings) {
         super(index, indexSettings);
-        idReaders = new MapMaker().weakKeys().makeMap();
+        idReaders = ConcurrentCollections.newConcurrentMap();
     }
     @Override public void close() throws ElasticSearchException {
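SimpleIdCache follows the same pattern, and its idReaders map is one of the reader-keyed maps the commit message says gets explicitly cleared. A lifecycle sketch of that rationale, with IdReaderCacheStub as a placeholder and the reader-key derivation omitted because it is not part of this diff:

import java.util.concurrent.ConcurrentMap;

import org.elasticsearch.common.util.concurrent.ConcurrentCollections;

// Explicit cleanup is what makes weak keys on idReaders unnecessary.
public class IdCacheSketch {

    static class IdReaderCacheStub { /* per-reader id data, omitted */ }

    private final ConcurrentMap<Object, IdReaderCacheStub> idReaders =
            ConcurrentCollections.newConcurrentMap();

    // Remove a single reader's entry when that reader is closed.
    public void clear(Object readerKey) {
        idReaders.remove(readerKey);
    }

    // Drop everything when the cache itself is closed.
    public void close() {
        idReaders.clear();
    }
}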