remove clearUnreferenced from caches, no longer need it

kimchy 2011-04-09 14:20:18 +03:00
parent 49e2f55abd
commit 991683efa6
13 changed files with 18 additions and 101 deletions
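
For orientation, the block below is a minimal, hypothetical sketch (not Elasticsearch code) of the pattern this commit deletes: each cache exposed a clearUnreferenced() hook, IndexCache fanned the call out to its filter, field-data, id, and bloom caches, and InternalIndexService registered a CleanCacheOnIndicesLifecycleListener that triggered the sweep around shard close. Only the names clearUnreferenced, IndexCache, and CleanCacheOnIndicesLifecycleListener come from the hunks below; the simplified types are invented for illustration.

```java
import java.util.Arrays;
import java.util.List;

// Hypothetical, simplified model of the removed pattern; SimpleCache,
// CompositeIndexCache and ShardCloseListener are invented names.
interface SimpleCache {
    void clear();
    void clearUnreferenced(); // drop entries whose readers are no longer referenced
}

class CompositeIndexCache implements SimpleCache {
    private final List<SimpleCache> caches; // filter, field data, id and bloom caches

    CompositeIndexCache(SimpleCache... caches) {
        this.caches = Arrays.asList(caches);
    }

    @Override public void clear() {
        for (SimpleCache cache : caches) {
            cache.clear();
        }
    }

    // The fan-out that IndexCache.clearUnreferenced() performed before this commit.
    @Override public void clearUnreferenced() {
        for (SimpleCache cache : caches) {
            cache.clearUnreferenced();
        }
    }
}

// Stand-in for the removed CleanCacheOnIndicesLifecycleListener: it invoked the
// sweep both before and after a shard was closed.
class ShardCloseListener {
    private final CompositeIndexCache indexCache;

    ShardCloseListener(CompositeIndexCache indexCache) {
        this.indexCache = indexCache;
    }

    void beforeShardClosed() { indexCache.clearUnreferenced(); }
    void afterShardClosed()  { indexCache.clearUnreferenced(); }
}
```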

View File

@@ -122,13 +122,6 @@ public class IndexCache extends AbstractIndexComponent implements CloseableComponent
bloomCache.clear();
}
public void clearUnreferenced() {
filterCache.clearUnreferenced();
fieldDataCache.clearUnreferenced();
idCache.clearUnreferenced();
bloomCache.clearUnreferenced();
}
@Override public void clusterChanged(ClusterChangedEvent event) {
// clear the query parser cache if the metadata (mappings) changed...
if (event.metaDataChanged()) {

View File

@@ -39,8 +39,6 @@ public interface BloomCache extends IndexComponent, CloseableComponent {
void clear(IndexReader reader);
void clearUnreferenced();
long sizeInBytes();
long sizeInBytes(String fieldName);

View File

@@ -53,9 +53,6 @@ public class NonBloomCache extends AbstractIndexComponent implements BloomCache
@Override public void clear(IndexReader reader) {
}
@Override public void clearUnreferenced() {
}
@Override public long sizeInBytes() {
return 0;
}

View File

@@ -90,10 +90,6 @@ public class SimpleBloomCache extends AbstractIndexComponent implements BloomCache
}
}
@Override public void clearUnreferenced() {
// nothing to do here...
}
@Override public long sizeInBytes() {
// the overhead of the map is not really relevant...
long sizeInBytes = 0;

View File

@@ -40,8 +40,6 @@ public interface FieldDataCache extends IndexComponent, CloseableComponent {
void clear(IndexReader reader);
void clearUnreferenced();
long evictions();
long sizeInBytes();

View File

@@ -56,9 +56,6 @@ public class NoneFieldDataCache extends AbstractIndexComponent implements FieldDataCache
@Override public void clear(IndexReader reader) {
}
@Override public void clearUnreferenced() {
}
@Override public void close() throws ElasticSearchException {
}

View File

@@ -70,10 +70,6 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexComponent
}
}
@Override public void clearUnreferenced() {
// nothing to do here...
}
@Override public long sizeInBytes() {
// the overhead of the map is not really relevant...
long sizeInBytes = 0;

View File

@@ -39,11 +39,6 @@ public interface FilterCache extends IndexComponent, CloseableComponent {
void clear();
/**
* Clears unreferenced filters.
*/
void clearUnreferenced();
long count();
long sizeInBytes();

View File

@@ -62,10 +62,6 @@ public class NoneFilterCache extends AbstractIndexComponent implements FilterCache
// nothing to do here
}
@Override public void clearUnreferenced() {
// nothing to do here
}
@Override public long count() {
return 0;
}

View File

@@ -68,26 +68,6 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComponent
}
}
@Override public void clearUnreferenced() {
// can't do this, since we cache on cacheKey...
// int totalCount = cache.size();
// int cleaned = 0;
// for (Iterator<IndexReader> readerIt = cache.keySet().iterator(); readerIt.hasNext();) {
// IndexReader reader = readerIt.next();
// if (reader.getRefCount() <= 0) {
// readerIt.remove();
// cleaned++;
// }
// }
// if (logger.isDebugEnabled()) {
// if (cleaned > 0) {
// logger.debug("Cleaned [{}] out of estimated total [{}]", cleaned, totalCount);
// }
// } else if (logger.isTraceEnabled()) {
// logger.trace("Cleaned [{}] out of estimated total [{}]", cleaned, totalCount);
// }
}
@Override public long sizeInBytes() {
long sizeInBytes = 0;
for (ConcurrentMap<Filter, DocSet> map : cache.values()) {

View File

@@ -32,11 +32,6 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable<IdReaderCache>
void clear(IndexReader reader);
/**
* Clears unreferenced readers.
*/
void clearUnreferenced();
void refresh(IndexReader[] readers) throws Exception;
IdReaderCache reader(IndexReader reader);

View File

@@ -75,10 +75,6 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, In
idReaders.remove(reader.getCoreCacheKey());
}
@Override public void clearUnreferenced() {
// nothing to do here...
}
@Override public IdReaderCache reader(IndexReader reader) {
return idReaders.get(reader.getCoreCacheKey());
}

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.index.service;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.ElasticSearchInterruptedException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.collect.UnmodifiableIterator;
@@ -118,8 +117,6 @@ public class InternalIndexService extends AbstractIndexComponent implements IndexService
private volatile boolean closed = false;
private final CleanCacheOnIndicesLifecycleListener cleanCacheOnIndicesLifecycleListener = new CleanCacheOnIndicesLifecycleListener();
@Inject public InternalIndexService(Injector injector, Index index, @IndexSettings Settings indexSettings, NodeEnvironment nodeEnv, ThreadPool threadPool,
PercolatorService percolatorService, AnalysisService analysisService, MapperService mapperService, IndexQueryParserService queryParserService, SimilarityService similarityService,
IndexCache indexCache, IndexEngine indexEngine, IndexGateway indexGateway, IndexStore indexStore) {
@@ -140,8 +137,6 @@ public class InternalIndexService extends AbstractIndexComponent implements IndexService
this.pluginsService = injector.getInstance(PluginsService.class);
this.indicesLifecycle = (InternalIndicesLifecycle) injector.getInstance(IndicesLifecycle.class);
this.indicesLifecycle.addListener(cleanCacheOnIndicesLifecycleListener);
}
@Override public int numberOfShards() {
@@ -216,29 +211,25 @@ public class InternalIndexService extends AbstractIndexComponent implements IndexService
         synchronized (this) {
             closed = true;
         }
-        try {
-            Set<Integer> shardIds = shardIds();
-            final CountDownLatch latch = new CountDownLatch(shardIds.size());
-            for (final int shardId : shardIds) {
-                threadPool.cached().execute(new Runnable() {
-                    @Override public void run() {
-                        try {
-                            deleteShard(shardId, delete, !delete, delete, reason);
-                        } catch (Exception e) {
-                            logger.warn("failed to close shard, delete [{}]", e, delete);
-                        } finally {
-                            latch.countDown();
-                        }
-                    }
-                });
-            }
-            try {
-                latch.await();
-            } catch (InterruptedException e) {
-                throw new ElasticSearchInterruptedException("interrupted closing index [ " + index().name() + "]", e);
-            }
-        } finally {
-            indicesLifecycle.removeListener(cleanCacheOnIndicesLifecycleListener);
-        }
+        Set<Integer> shardIds = shardIds();
+        final CountDownLatch latch = new CountDownLatch(shardIds.size());
+        for (final int shardId : shardIds) {
+            threadPool.cached().execute(new Runnable() {
+                @Override public void run() {
+                    try {
+                        deleteShard(shardId, delete, !delete, delete, reason);
+                    } catch (Exception e) {
+                        logger.warn("failed to close shard, delete [{}]", e, delete);
+                    } finally {
+                        latch.countDown();
+                    }
+                }
+            });
+        }
+        try {
+            latch.await();
+        } catch (InterruptedException e) {
+            throw new ElasticSearchInterruptedException("interrupted closing index [ " + index().name() + "]", e);
+        }
     }
@@ -419,15 +410,4 @@ public class InternalIndexService extends AbstractIndexComponent implements IndexService
FileSystemUtils.deleteRecursively(nodeEnv.shardLocation(sId));
}
}
class CleanCacheOnIndicesLifecycleListener extends IndicesLifecycle.Listener {
@Override public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, boolean delete) {
indexCache.clearUnreferenced();
}
@Override public void afterIndexShardClosed(ShardId shardId, boolean delete) {
indexCache.clearUnreferenced();
}
}
}
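
What remains after the removal are the explicit clear entry points visible in the hunks above: clear() and clear(IndexReader reader) on FilterCache, FieldDataCache, IdCache, and BloomCache. As a rough, hypothetical illustration of how a caller could still release per-reader entries without a clearUnreferenced() sweep, assuming a cache type with the same clear(IndexReader) signature (the PerReaderCache interface and ReaderScopedCleanup helper below are invented, not Elasticsearch types):

```java
import org.apache.lucene.index.IndexReader;

// Hypothetical stand-in mirroring the clear(IndexReader) method that the real
// cache interfaces above still declare; not an Elasticsearch type.
interface PerReaderCache {
    void clear(IndexReader reader);
}

final class ReaderScopedCleanup {
    private ReaderScopedCleanup() {}

    // Illustrative only: release a reader's cache entries explicitly when the
    // reader is closed, instead of relying on a clearUnreferenced() sweep.
    static void closeAndClear(IndexReader reader, PerReaderCache... caches) throws java.io.IOException {
        try {
            for (PerReaderCache cache : caches) {
                cache.clear(reader);
            }
        } finally {
            reader.close();
        }
    }
}
```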