mirror of https://github.com/apache/lucene.git

SOLR-7355: Switch from ConcurrentLinkedHashMap to Caffeine. Trunk-only change, as it requires Java 8.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1672133 13f79535-47bb-0310-9956-ffa450edef68

parent eaf4659bfe
commit 7e9b7448d1
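
At its core the patch swaps one builder API for another: ConcurrentLinkedHashMap produced a bounded ConcurrentMap directly, while Caffeine produces a Cache whose map view is obtained via asMap(). A minimal orientation sketch of the mapping (the String/byte[] types and the capacity of 1024 are illustrative, not taken from the patch):

    import java.util.concurrent.ConcurrentMap;

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;

    // Before: the bounded map *is* the cache.
    ConcurrentMap<String, byte[]> before =
        new ConcurrentLinkedHashMap.Builder<String, byte[]>()
            .maximumWeightedCapacity(1024)
            .build();

    // After: build a Cache, then use it directly or through its map view.
    Cache<String, byte[]> cache = Caffeine.newBuilder()
        .maximumSize(1024)
        .build();
    ConcurrentMap<String, byte[]> after = cache.asMap();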
solr/CHANGES.txt
@@ -58,6 +58,9 @@ Other Changes
 
 * SOLR-6954: Deprecated SolrClient.shutdown() method removed (Alan Woodward)
 
+* SOLR-7355: Switch from Google's ConcurrentLinkedHashMap to Caffeine. Only
+  affects HDFS support. (Ben Manes via Shawn Heisey)
+
 ================== 5.2.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release
solr/core/ivy.xml
@@ -66,7 +66,7 @@
     <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="compile.hadoop"/>
     <dependency org="com.google.protobuf" name="protobuf-java" rev="${/com.google.protobuf/protobuf-java}" conf="compile.hadoop"/>
-    <dependency org="com.googlecode.concurrentlinkedhashmap" name="concurrentlinkedhashmap-lru" rev="${/com.googlecode.concurrentlinkedhashmap/concurrentlinkedhashmap-lru}" conf="compile.hadoop"/>
+    <dependency org="com.github.ben-manes.caffeine" name="caffeine" rev="${/com.github.ben-manes.caffeine/caffeine}" conf="compile.hadoop"/>
     <dependency org="org.htrace" name="htrace-core" rev="${/org.htrace/htrace-core}" conf="compile.hadoop"/>
 
     <!-- Hadoop DfsMiniCluster Dependencies-->
BlockCache.java
@@ -21,8 +21,9 @@ import java.nio.ByteBuffer;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
-import com.googlecode.concurrentlinkedhashmap.EvictionListener;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.RemovalListener;
 
 /**
  * @lucene.experimental
@@ -31,7 +32,7 @@ public class BlockCache {
 
   public static final int _128M = 134217728;
   public static final int _32K = 32768;
-  private final ConcurrentMap<BlockCacheKey,BlockCacheLocation> cache;
+  private final Cache<BlockCacheKey,BlockCacheLocation> cache;
   private final ByteBuffer[] banks;
   private final BlockLocks[] locks;
   private final AtomicInteger[] lockCounters;
@@ -69,19 +70,17 @@ public class BlockCache {
       lockCounters[i] = new AtomicInteger();
     }
 
-    EvictionListener<BlockCacheKey,BlockCacheLocation> listener = new EvictionListener<BlockCacheKey,BlockCacheLocation>() {
-      @Override
-      public void onEviction(BlockCacheKey key, BlockCacheLocation location) {
-        releaseLocation(location);
-      }
-    };
-    cache = new ConcurrentLinkedHashMap.Builder<BlockCacheKey,BlockCacheLocation>()
-        .maximumWeightedCapacity(maxEntries).listener(listener).build();
+    RemovalListener<BlockCacheKey,BlockCacheLocation> listener =
+        notification -> releaseLocation(notification.getValue());
+    cache = Caffeine.newBuilder()
+        .removalListener(listener)
+        .maximumSize(maxEntries)
+        .build();
     this.blockSize = blockSize;
   }
 
   public void release(BlockCacheKey key) {
-    releaseLocation(cache.remove(key));
+    cache.invalidate(key);
   }
 
   private void releaseLocation(BlockCacheLocation location) {
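
The new builder wires cleanup through the removal listener: Caffeine notifies it on size-based eviction, on replacement, and on explicit invalidation alike, which is why release() can shrink to a bare cache.invalidate(key). A small self-contained sketch of that behavior, written against current Caffeine releases (whose listener takes key/value/cause; the 1.x API used a single RemovalNotification, as in the patch). The class name and sample entries are illustrative:

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.github.benmanes.caffeine.cache.RemovalCause;

    public class RemovalDemo {
      public static void main(String[] args) {
        Cache<String, String> cache = Caffeine.newBuilder()
            .maximumSize(2)
            .executor(Runnable::run)  // deliver notifications synchronously, for the demo's sake
            .removalListener((String key, String value, RemovalCause cause) ->
                System.out.println("removed " + key + " (" + cause + ")"))
            .build();

        cache.put("a", "1");
        cache.put("a", "2");     // prints: removed a (REPLACED)
        cache.invalidate("a");   // prints: removed a (EXPLICIT)
      }
    }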
@@ -104,7 +103,7 @@ public class BlockCache {
           + blockSize + "] got length [" + length + "] with blockOffset ["
           + blockOffset + "]");
     }
-    BlockCacheLocation location = cache.get(blockCacheKey);
+    BlockCacheLocation location = cache.getIfPresent(blockCacheKey);
     boolean newLocation = false;
     if (location == null) {
       newLocation = true;
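
The rename from get to getIfPresent is forced by the API: on a Caffeine Cache, get always takes a mapping function and computes the value on a miss, while getIfPresent is the plain null-on-miss lookup the old ConcurrentMap code relied on. A hedged side-by-side (the types and the new byte[8] loader are illustrative, not from the patch):

    import java.util.concurrent.ConcurrentMap;

    import com.github.benmanes.caffeine.cache.Cache;

    class LookupSketch {
      // Illustrative only: old and new miss behavior side by side.
      static void lookups(ConcurrentMap<String, byte[]> map, Cache<String, byte[]> cache) {
        byte[] a = map.get("k");                       // ConcurrentMap: null on miss
        byte[] b = cache.getIfPresent("k");            // Cache: null on miss
        byte[] c = cache.get("k", k -> new byte[8]);   // Cache: computes and stores on miss
      }
    }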
@@ -122,7 +121,7 @@ public class BlockCache {
     bank.position(bankOffset + blockOffset);
     bank.put(data, offset, length);
     if (newLocation) {
-      releaseLocation(cache.put(blockCacheKey.clone(), location));
+      cache.put(blockCacheKey.clone(), location);
       metrics.blockCacheSize.incrementAndGet();
     }
     return true;
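
One subtlety in this hunk: ConcurrentMap.put returned the previous mapping, which the old line released inline; Caffeine's Cache.put returns void. No cleanup is lost, because any value displaced by the put is handed to the removal listener registered in the constructor (with cause REPLACED in current Caffeine releases), as the RemovalDemo sketch above shows.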
@@ -130,7 +129,7 @@ public class BlockCache {
 
   public boolean fetch(BlockCacheKey blockCacheKey, byte[] buffer,
       int blockOffset, int off, int length) {
-    BlockCacheLocation location = cache.get(blockCacheKey);
+    BlockCacheLocation location = cache.getIfPresent(blockCacheKey);
     if (location == null) {
       return false;
     }
@@ -201,6 +200,6 @@ public class BlockCache {
   }
 
   public int getSize() {
-    return cache.size();
+    return cache.asMap().size();
   }
 }
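
asMap() exposes the cache as a live ConcurrentMap view, so size(), iteration, and compute-style operations all act on the cache's contents; current Caffeine releases also offer Cache.estimatedSize() for a cheaper, possibly stale count. A minimal sketch of what the patched getSize() does (the String/byte[] types are illustrative):

    import java.util.concurrent.ConcurrentMap;

    import com.github.benmanes.caffeine.cache.Cache;

    class SizeSketch {
      static int size(Cache<String, byte[]> cache) {
        ConcurrentMap<String, byte[]> view = cache.asMap();  // live view; changes flow both ways
        return view.size();                                  // what the patched getSize() returns
      }
    }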
BlockDirectoryTest.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.Random;
 
+import com.github.benmanes.caffeine.cache.Caffeine;
+
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IOContext;
@@ -34,12 +36,13 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
-
 public class BlockDirectoryTest extends SolrTestCaseJ4 {
 
   private class MapperCache implements Cache {
-    public Map<String, byte[]> map = new ConcurrentLinkedHashMap.Builder<String, byte[]>().maximumWeightedCapacity(8).build();
+    public Map<String, byte[]> map = Caffeine.newBuilder()
+        .maximumSize(8)
+        .<String, byte[]>build()
+        .asMap();
 
     @Override
     public void update(String name, long blockId, int blockOffset, byte[] buffer, int offset, int length) {
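
Because asMap() returns a ConcurrentMap, the Caffeine chain is a drop-in stand-in for the bounded map the test used to get from ConcurrentLinkedHashMap. A sketch of the pattern on its own (the capacity of 8 mirrors the hunk; the class name and sample entries are illustrative):

    import java.util.Map;

    import com.github.benmanes.caffeine.cache.Caffeine;

    public class BoundedMapSketch {
      public static void main(String[] args) {
        Map<String, byte[]> bounded = Caffeine.newBuilder()
            .maximumSize(8)
            .<String, byte[]>build()
            .asMap();

        for (int i = 0; i < 20; i++) {
          bounded.put("block-" + i, new byte[4]);  // older entries are evicted, not rejected
        }
        System.out.println(bounded.size());  // at most 8 once eviction catches up
      }
    }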
@@ -167,7 +170,10 @@ public class BlockDirectoryTest extends SolrTestCaseJ4 {
 
   @Test
   public void testRandomAccessWritesLargeCache() throws IOException {
-    mapperCache.map = new ConcurrentLinkedHashMap.Builder<String, byte[]>().maximumWeightedCapacity(10000).build();
+    mapperCache.map = Caffeine.newBuilder()
+        .maximumSize(10_000)
+        .<String, byte[]>build()
+        .asMap();
     testRandomAccessWrites();
   }
 
New file (sha1 checksum for the Caffeine jar):
@@ -0,0 +1 @@
+eb95a1eb55cb02018b8e0bc1609ce569b455ea98

New file (Caffeine NOTICE):
@@ -0,0 +1 @@
+Copyright 2015 by Ben Manes

Deleted file (sha1 checksum for the concurrentlinkedhashmap-lru jar):
@@ -1 +0,0 @@
-4316d710b6619ffe210c98deb2b0893587dad454