mirror of https://github.com/apache/lucene.git
SOLR-13693: Use strongly-typed setters for cache parameters.
This commit is contained in:
  parent 0106b74bad
  commit c48a3cd9dc
solr/CHANGES.txt
@@ -65,7 +65,7 @@ Improvements
 * SOLR-12368: Support InPlace DV updates for a field that does not yet exist in any documents
   (hossman, Simon Willnauer, Adrien Grand, Munendra S N)
 
-* SOLR-13558: Allow dynamic resizing of SolrCache-s. (ab)
+* SOLR-13558, SOLR-13693: Allow dynamic resizing of SolrCache-s. (ab)
 
 * SOLR-6305: Ability to set the replication factor for index files created by HDFSDirectoryFactory (Boris Pasko via Kevin Risden)
 
solr/core/src/java/org/apache/solr/search/FastLRUCache.java
@@ -27,7 +27,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.lang.invoke.MethodHandles;
-import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.List;
 import java.util.Map;
@@ -69,7 +68,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
   private int showItems = 0;
 
   private long maxRamBytes;
-  private int sizeLimit;
+  private int maxSize;
   private int minSizeLimit;
   private int initialSize;
   private int acceptableSize;
@@ -84,10 +83,10 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
   public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
     super.init(args, regenerator);
     String str = (String) args.get(SIZE_PARAM);
-    sizeLimit = str == null ? 1024 : Integer.parseInt(str);
+    maxSize = str == null ? 1024 : Integer.parseInt(str);
     str = (String) args.get(MIN_SIZE_PARAM);
     if (str == null) {
-      minSizeLimit = (int) (sizeLimit * 0.9);
+      minSizeLimit = (int) (maxSize * 0.9);
     } else {
       minSizeLimit = Integer.parseInt(str);
     }
@@ -95,7 +94,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
 
     str = (String) args.get(ACCEPTABLE_SIZE_PARAM);
     if (str == null) {
-      acceptableSize = (int) (sizeLimit * 0.95);
+      acceptableSize = (int) (maxSize * 0.95);
     } else {
       acceptableSize = Integer.parseInt(str);
     }
@@ -103,7 +102,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
     acceptableSize = Math.max(minSizeLimit, acceptableSize);
 
     str = (String) args.get(INITIAL_SIZE_PARAM);
-    initialSize = str == null ? sizeLimit : Integer.parseInt(str);
+    initialSize = str == null ? maxSize : Integer.parseInt(str);
     str = (String) args.get(CLEANUP_THREAD_PARAM);
     cleanupThread = str == null ? false : Boolean.parseBoolean(str);
 
@@ -119,8 +118,8 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
       cache = new ConcurrentLRUCache<>(ramLowerWatermark, maxRamBytes, cleanupThread, null);
     } else {
       ramLowerWatermark = -1L;
-      description = generateDescription(sizeLimit, initialSize, minSizeLimit, acceptableSize, cleanupThread);
-      cache = new ConcurrentLRUCache<>(sizeLimit, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null);
+      description = generateDescription(maxSize, initialSize, minSizeLimit, acceptableSize, cleanupThread);
+      cache = new ConcurrentLRUCache<>(maxSize, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null);
     }
 
     cache.setAlive(false);
@@ -143,7 +142,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
     if (maxRamBytes != Long.MAX_VALUE) {
       return generateDescription(maxRamBytes, ramLowerWatermark, cleanupThread);
     } else {
-      return generateDescription(sizeLimit, initialSize, minSizeLimit, acceptableSize, cleanupThread);
+      return generateDescription(maxSize, initialSize, minSizeLimit, acceptableSize, cleanupThread);
     }
   }
 
@@ -273,15 +272,15 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
       cevictions += statistiscs.getCumulativeEvictions();
     }
 
-    map.put("lookups", lookups);
-    map.put("hits", hits);
-    map.put("hitratio", calcHitRatio(lookups, hits));
-    map.put("inserts", inserts);
-    map.put("evictions", evictions);
-    map.put("size", size);
+    map.put(LOOKUPS_PARAM, lookups);
+    map.put(HITS_PARAM, hits);
+    map.put(HIT_RATIO_PARAM, calcHitRatio(lookups, hits));
+    map.put(INSERTS_PARAM, inserts);
+    map.put(EVICTIONS_PARAM, evictions);
+    map.put(SIZE_PARAM, size);
     map.put("cleanupThread", cleanupThread);
-    map.put("ramBytesUsed", ramBytesUsed());
-    map.put("maxRamMB", maxRamBytes != Long.MAX_VALUE ? maxRamBytes / 1024L / 1024L : -1L);
+    map.put(RAM_BYTES_USED_PARAM, ramBytesUsed());
+    map.put(MAX_RAM_MB_PARAM, getMaxRamMB());
 
     map.put("warmupTime", warmupTime);
     map.put("cumulative_lookups", clookups);
@@ -331,88 +330,48 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
   }
 
   @Override
-  public Map<String, Object> getResourceLimits() {
-    Map<String, Object> limits = new HashMap<>();
-    limits.put(SIZE_PARAM, cache.getStats().getCurrentSize());
-    limits.put(MIN_SIZE_PARAM, minSizeLimit);
-    limits.put(ACCEPTABLE_SIZE_PARAM, acceptableSize);
-    limits.put(CLEANUP_THREAD_PARAM, cleanupThread);
-    limits.put(SHOW_ITEMS_PARAM, showItems);
-    limits.put(MAX_RAM_MB_PARAM, maxRamBytes != Long.MAX_VALUE ? maxRamBytes / 1024L / 1024L : -1L);
-    return limits;
+  public int getMaxSize() {
+    return maxSize != Integer.MAX_VALUE ? maxSize : -1;
   }
 
   @Override
-  public void setResourceLimit(String limitName, Object val) {
-    if (CLEANUP_THREAD_PARAM.equals(limitName)) {
-      Boolean value;
-      try {
-        value = Boolean.parseBoolean(val.toString());
-        cleanupThread = value;
-        cache.setRunCleanupThread(cleanupThread);
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Invalid new value for boolean limit '" + limitName + "': " + val);
-      }
+  public void setMaxSize(int maxSize) {
+    if (maxSize > 0) {
+      this.maxSize = maxSize;
+    } else {
+      this.maxSize = Integer.MAX_VALUE;
     }
-    Number value;
-    try {
-      value = Long.parseLong(String.valueOf(val));
-    } catch (Exception e) {
-      throw new IllegalArgumentException("Invalid new value for numeric limit '" + limitName +"': " + val);
-    }
-    if (!limitName.equals(MAX_RAM_MB_PARAM)) {
-      if (value.intValue() <= 1) {
-        throw new IllegalArgumentException("Invalid new value for numeric limit '" + limitName +"': " + value);
-      }
-    }
-    if (value.longValue() > Integer.MAX_VALUE) {
-      throw new IllegalArgumentException("Invalid new value for numeric limit '" + limitName +"': " + value);
-    }
-    switch (limitName) {
-      case SIZE_PARAM:
-        sizeLimit = value.intValue();
-        checkAndAdjustLimits();
-        cache.setUpperWaterMark(sizeLimit);
-        cache.setLowerWaterMark(minSizeLimit);
-        break;
-      case MIN_SIZE_PARAM:
-        minSizeLimit = value.intValue();
-        checkAndAdjustLimits();
-        cache.setUpperWaterMark(sizeLimit);
-        cache.setLowerWaterMark(minSizeLimit);
-        break;
-      case ACCEPTABLE_SIZE_PARAM:
-        acceptableSize = value.intValue();
-        acceptableSize = Math.max(minSizeLimit, acceptableSize);
-        cache.setAcceptableWaterMark(acceptableSize);
-        break;
-      case MAX_RAM_MB_PARAM:
-        long maxRamMB = value.intValue();
-        maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
-        if (maxRamMB < 0) {
-          ramLowerWatermark = Long.MIN_VALUE;
-        } else {
-          ramLowerWatermark = Math.round(maxRamBytes * 0.8);
-        }
-        cache.setRamUpperWatermark(maxRamBytes);
-        cache.setRamLowerWatermark(ramLowerWatermark);
-        break;
-      case SHOW_ITEMS_PARAM:
-        showItems = value.intValue();
-        break;
-      default:
-        throw new IllegalArgumentException("Unsupported limit '" + limitName + "'");
-    }
+    checkAndAdjustLimits();
+    cache.setUpperWaterMark(maxSize);
+    cache.setLowerWaterMark(minSizeLimit);
+    description = generateDescription();
+  }
+
+  @Override
+  public int getMaxRamMB() {
+    return maxRamBytes != Long.MAX_VALUE ? (int) (maxRamBytes / 1024L / 1024L) : -1;
+  }
+
+  @Override
+  public void setMaxRamMB(int maxRamMB) {
+    maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
+    if (maxRamMB < 0) {
+      ramLowerWatermark = Long.MIN_VALUE;
+    } else {
+      ramLowerWatermark = Math.round(maxRamBytes * 0.8);
+    }
+    cache.setRamUpperWatermark(maxRamBytes);
+    cache.setRamLowerWatermark(ramLowerWatermark);
     description = generateDescription();
   }
 
   private void checkAndAdjustLimits() {
     if (minSizeLimit <= 0) minSizeLimit = 1;
-    if (sizeLimit <= minSizeLimit) {
-      if (sizeLimit > 1) {
-        minSizeLimit = sizeLimit - 1;
+    if (maxSize <= minSizeLimit) {
+      if (maxSize > 1) {
+        minSizeLimit = maxSize - 1;
       } else {
-        sizeLimit = minSizeLimit + 1;
+        maxSize = minSizeLimit + 1;
       }
     }
   }
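To make the watermark arithmetic above concrete: a minimal standalone sketch (hypothetical values, not part of the commit; the logic mirrors setMaxRamMB() in FastLRUCache) of what a call like setMaxRamMB(100) computes before pushing the values into ConcurrentLRUCache:

    int maxRamMB = 100;  // hypothetical new limit; negative would mean "unlimited"
    // Same arithmetic as the commit's FastLRUCache.setMaxRamMB():
    long maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
    long ramLowerWatermark = maxRamMB < 0 ? Long.MIN_VALUE : Math.round(maxRamBytes * 0.8);
    System.out.println(maxRamBytes);        // 104857600, the 100 MB upper watermark
    System.out.println(ramLowerWatermark);  // 83886080, i.e. 80% of the budget

In other words, one typed setter replaces two string-keyed map entries, and evictions aim to bring RAM usage down to 80% of the configured maximum.
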
solr/core/src/java/org/apache/solr/search/LFUCache.java
@@ -17,7 +17,6 @@
 package org.apache.solr.search;
 
 import java.lang.invoke.MethodHandles;
-import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.List;
 import java.util.Map;
@@ -81,7 +80,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
   private MetricRegistry registry;
 
-  private int sizeLimit;
+  private int maxSize;
   private int minSizeLimit;
   private int initialSize;
   private int acceptableSize;
@@ -93,10 +92,10 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
     this.regenerator = regenerator;
     name = (String) args.get(NAME);
     String str = (String) args.get(SIZE_PARAM);
-    sizeLimit = str == null ? 1024 : Integer.parseInt(str);
+    maxSize = str == null ? 1024 : Integer.parseInt(str);
     str = (String) args.get(MIN_SIZE_PARAM);
     if (str == null) {
-      minSizeLimit = (int) (sizeLimit * 0.9);
+      minSizeLimit = (int) (maxSize * 0.9);
     } else {
       minSizeLimit = Integer.parseInt(str);
     }
@@ -104,7 +103,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
 
     str = (String) args.get(ACCEPTABLE_SIZE_PARAM);
     if (str == null) {
-      acceptableSize = (int) (sizeLimit * 0.95);
+      acceptableSize = (int) (maxSize * 0.95);
     } else {
       acceptableSize = Integer.parseInt(str);
     }
@@ -112,7 +111,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
     acceptableSize = Math.max(minSizeLimit, acceptableSize);
 
     str = (String) args.get(INITIAL_SIZE_PARAM);
-    initialSize = str == null ? sizeLimit : Integer.parseInt(str);
+    initialSize = str == null ? maxSize : Integer.parseInt(str);
     str = (String) args.get(AUTOWARM_COUNT_PARAM);
     autowarmCount = str == null ? 0 : Integer.parseInt(str);
     str = (String) args.get(CLEANUP_THREAD_PARAM);
@@ -127,7 +126,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
 
     description = generateDescription();
 
-    cache = new ConcurrentLFUCache<>(sizeLimit, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null, timeDecay);
+    cache = new ConcurrentLFUCache<>(maxSize, minSizeLimit, acceptableSize, initialSize, cleanupThread, false, null, timeDecay);
     cache.setAlive(false);
 
     statsList = (List<ConcurrentLFUCache.Stats>) persistence;
@@ -145,7 +144,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
   }
 
   private String generateDescription() {
-    String descr = "Concurrent LFU Cache(maxSize=" + sizeLimit + ", initialSize=" + initialSize +
+    String descr = "Concurrent LFU Cache(maxSize=" + maxSize + ", initialSize=" + initialSize +
         ", minSize=" + minSizeLimit + ", acceptableSize=" + acceptableSize + ", cleanupThread=" + cleanupThread +
         ", timeDecay=" + Boolean.toString(timeDecay);
     if (autowarmCount > 0) {
@@ -266,16 +265,22 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
       long evictions = stats.getCumulativeEvictions();
       long size = stats.getCurrentSize();
 
-      map.put("lookups", lookups);
-      map.put("hits", hits);
-      map.put("hitratio", calcHitRatio(lookups, hits));
-      map.put("inserts", inserts);
-      map.put("evictions", evictions);
-      map.put("size", size);
+      map.put(LOOKUPS_PARAM, lookups);
+      map.put(HITS_PARAM, hits);
+      map.put(HIT_RATIO_PARAM, calcHitRatio(lookups, hits));
+      map.put(INSERTS_PARAM, inserts);
+      map.put(EVICTIONS_PARAM, evictions);
+      map.put(SIZE_PARAM, size);
+      map.put(MAX_SIZE_PARAM, maxSize);
+      map.put(MIN_SIZE_PARAM, minSizeLimit);
+      map.put(ACCEPTABLE_SIZE_PARAM, acceptableSize);
+      map.put(AUTOWARM_COUNT_PARAM, autowarmCount);
+      map.put(CLEANUP_THREAD_PARAM, cleanupThread);
+      map.put(SHOW_ITEMS_PARAM, showItems);
+      map.put(TIME_DECAY_PARAM, timeDecay);
+
 
       map.put("warmupTime", warmupTime);
-      map.put("timeDecay", timeDecay);
-      map.put("cleanupThread", cleanupThread);
 
       long clookups = 0;
       long chits = 0;
@@ -294,7 +299,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
       map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
       map.put("cumulative_inserts", cinserts);
       map.put("cumulative_evictions", cevictions);
-      map.put("ramBytesUsed", ramBytesUsed());
+      map.put(RAM_BYTES_USED_PARAM, ramBytesUsed());
 
       if (detailed && showItems != 0) {
         Map items = cache.getMostUsedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
@@ -346,85 +351,40 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
   }
 
   @Override
-  public Map<String, Object> getResourceLimits() {
-    Map<String, Object> limits = new HashMap<>();
-    limits.put(SIZE_PARAM, cache.getStats().getCurrentSize());
-    limits.put(MIN_SIZE_PARAM, minSizeLimit);
-    limits.put(ACCEPTABLE_SIZE_PARAM, acceptableSize);
-    limits.put(AUTOWARM_COUNT_PARAM, autowarmCount);
-    limits.put(CLEANUP_THREAD_PARAM, cleanupThread);
-    limits.put(SHOW_ITEMS_PARAM, showItems);
-    limits.put(TIME_DECAY_PARAM, timeDecay);
-    return limits;
+  public int getMaxSize() {
+    return maxSize != Integer.MAX_VALUE ? maxSize : -1;
   }
 
   @Override
-  public synchronized void setResourceLimit(String limitName, Object val) {
-    if (TIME_DECAY_PARAM.equals(limitName) || CLEANUP_THREAD_PARAM.equals(limitName)) {
-      Boolean value;
-      try {
-        value = Boolean.parseBoolean(String.valueOf(val));
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Invalid value of boolean limit '" + limitName + "': " + val);
-      }
-      switch (limitName) {
-        case TIME_DECAY_PARAM:
-          timeDecay = value;
-          cache.setTimeDecay(timeDecay);
-          break;
-        case CLEANUP_THREAD_PARAM:
-          cleanupThread = value;
-          cache.setRunCleanupThread(cleanupThread);
-          break;
-      }
+  public void setMaxSize(int maxSize) {
+    if (maxSize > 0) {
+      this.maxSize = maxSize;
     } else {
-      Number value;
-      try {
-        value = Long.parseLong(String.valueOf(val));
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Invalid new value for numeric limit '" + limitName +"': " + val);
-      }
-      if (value.intValue() <= 1 || value.longValue() > Integer.MAX_VALUE) {
-        throw new IllegalArgumentException("Out of range new value for numeric limit '" + limitName +"': " + value);
-      }
-      switch (limitName) {
-        case SIZE_PARAM:
-          sizeLimit = value.intValue();
-          checkAndAdjustLimits();
-          cache.setUpperWaterMark(sizeLimit);
-          cache.setLowerWaterMark(minSizeLimit);
-          break;
-        case MIN_SIZE_PARAM:
-          minSizeLimit = value.intValue();
-          checkAndAdjustLimits();
-          cache.setUpperWaterMark(sizeLimit);
-          cache.setLowerWaterMark(minSizeLimit);
-          break;
-        case ACCEPTABLE_SIZE_PARAM:
-          acceptableSize = value.intValue();
-          acceptableSize = Math.max(minSizeLimit, acceptableSize);
-          cache.setAcceptableWaterMark(acceptableSize);
-          break;
-        case AUTOWARM_COUNT_PARAM:
-          autowarmCount = value.intValue();
-          break;
-        case SHOW_ITEMS_PARAM:
-          showItems = value.intValue();
-          break;
-        default:
-          throw new IllegalArgumentException("Unsupported numeric limit '" + limitName + "'");
-      }
+      this.maxSize = Integer.MAX_VALUE;
     }
+    checkAndAdjustLimits();
+    cache.setUpperWaterMark(maxSize);
+    cache.setLowerWaterMark(minSizeLimit);
     description = generateDescription();
   }
 
+  @Override
+  public int getMaxRamMB() {
+    return -1;
+  }
+
+  @Override
+  public void setMaxRamMB(int maxRamMB) {
+    // no-op
+  }
+
   private void checkAndAdjustLimits() {
     if (minSizeLimit <= 0) minSizeLimit = 1;
-    if (sizeLimit <= minSizeLimit) {
-      if (sizeLimit > 1) {
-        minSizeLimit = sizeLimit - 1;
+    if (maxSize <= minSizeLimit) {
+      if (maxSize > 1) {
+        minSizeLimit = maxSize - 1;
       } else {
-        sizeLimit = minSizeLimit + 1;
+        maxSize = minSizeLimit + 1;
       }
    }
  }
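Worth noting in the hunk above: LFUCache supports only the item-count limit, so its getMaxRamMB() returns -1 and setMaxRamMB() is a no-op. A hedged sketch (hypothetical helper, not in the commit) of how generic resize code can rely on the -1 convention that the SolrCache interface defines further below:

    // Hypothetical caller: halve whatever limits a cache actually supports.
    // For LFUCache only the item-count branch runs; for LRUCache and
    // FastLRUCache the RAM branch may run as well.
    static void halveLimits(org.apache.solr.search.SolrCache<?, ?> cache) {
      int maxSize = cache.getMaxSize();    // -1 when no item limit is set
      if (maxSize > 0) {
        cache.setMaxSize(Math.max(2, maxSize / 2));
      }
      int maxRamMB = cache.getMaxRamMB();  // -1 when RAM limits are unsupported
      if (maxRamMB > 0) {
        cache.setMaxRamMB(Math.max(1, maxRamMB / 2));
      }
    }
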
solr/core/src/java/org/apache/solr/search/LRUCache.java
@@ -18,7 +18,6 @@ package org.apache.solr.search;
 
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
@@ -76,7 +75,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   private MetricsMap cacheMap;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
   private MetricRegistry registry;
-  private int sizeLimit;
+  private int maxSize;
   private int initialSize;
 
   private long maxRamBytes = Long.MAX_VALUE;
@@ -88,9 +87,9 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
     super.init(args, regenerator);
     String str = (String)args.get(SIZE_PARAM);
-    this.sizeLimit = str==null ? 1024 : Integer.parseInt(str);
+    this.maxSize = str==null ? 1024 : Integer.parseInt(str);
     str = (String)args.get("initialSize");
-    initialSize = Math.min(str==null ? 1024 : Integer.parseInt(str), sizeLimit);
+    initialSize = Math.min(str==null ? 1024 : Integer.parseInt(str), maxSize);
     str = (String) args.get(MAX_RAM_MB_PARAM);
     this.maxRamBytes = str == null ? Long.MAX_VALUE : (long) (Double.parseDouble(str) * 1024L * 1024L);
     description = generateDescription();
@@ -115,7 +114,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
           // must return false according to javadocs of removeEldestEntry if we're modifying
           // the map ourselves
           return false;
-        } else if (size() > getSizeLimit()) {
+        } else if (size() > getMaxSize()) {
           Iterator<Map.Entry<K, V>> iterator = entrySet().iterator();
           do {
             Map.Entry<K, V> entry = iterator.next();
@@ -129,7 +128,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
             iterator.remove();
             evictions++;
             stats.evictions.increment();
-          } while (iterator.hasNext() && size() > getSizeLimit());
+          } while (iterator.hasNext() && size() > getMaxSize());
           // must return false according to javadocs of removeEldestEntry if we're modifying
           // the map ourselves
           return false;
@@ -149,10 +148,6 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
     return persistence;
   }
 
-  public int getSizeLimit() {
-    return sizeLimit;
-  }
-
   public long getMaxRamBytes() {
     return maxRamBytes;
   }
@@ -162,7 +157,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
    * @return Returns the description of this cache.
    */
   private String generateDescription() {
-    String description = "LRU Cache(maxSize=" + getSizeLimit() + ", initialSize=" + initialSize;
+    String description = "LRU Cache(maxSize=" + getMaxSize() + ", initialSize=" + initialSize;
     if (isAutowarmingOn()) {
       description += ", " + getAutowarmDescription();
     }
@@ -182,7 +177,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
 
   @Override
   public V put(K key, V value) {
-    if (sizeLimit == Integer.MAX_VALUE && maxRamBytes == Long.MAX_VALUE) {
+    if (maxSize == Integer.MAX_VALUE && maxRamBytes == Long.MAX_VALUE) {
       throw new IllegalStateException("Cache: " + getName() + " has neither size nor RAM limit!");
     }
     synchronized (map) {
@@ -313,14 +308,15 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
     registry = manager.registry(registryName);
     cacheMap = new MetricsMap((detailed, res) -> {
       synchronized (map) {
-        res.put("lookups", lookups);
-        res.put("hits", hits);
-        res.put("hitratio", calcHitRatio(lookups,hits));
-        res.put("inserts", inserts);
-        res.put("evictions", evictions);
-        res.put("size", map.size());
-        res.put("ramBytesUsed", ramBytesUsed());
-        res.put("maxRamMB", maxRamBytes != Long.MAX_VALUE ? maxRamBytes / 1024L / 1024L : -1L);
+        res.put(LOOKUPS_PARAM, lookups);
+        res.put(HITS_PARAM, hits);
+        res.put(HIT_RATIO_PARAM, calcHitRatio(lookups,hits));
+        res.put(INSERTS_PARAM, inserts);
+        res.put(EVICTIONS_PARAM, evictions);
+        res.put(SIZE_PARAM, map.size());
+        res.put(RAM_BYTES_USED_PARAM, ramBytesUsed());
+        res.put(MAX_RAM_MB_PARAM, getMaxRamMB());
+        res.put(MAX_SIZE_PARAM, maxSize);
         res.put("evictionsRamUsage", evictionsRamUsage);
       }
       res.put("warmupTime", warmupTime);
@@ -367,43 +363,31 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   }
 
   @Override
-  public Map<String, Object> getResourceLimits() {
-    Map<String, Object> limits = new HashMap<>();
-    limits.put(SIZE_PARAM, sizeLimit);
-    limits.put(MAX_RAM_MB_PARAM, maxRamBytes != Long.MAX_VALUE ? maxRamBytes / 1024L / 1024L : -1L);
-    return limits;
+  public int getMaxSize() {
+    return maxSize != Integer.MAX_VALUE ? maxSize : -1;
   }
 
   @Override
-  public void setResourceLimit(String limitName, Object val) {
-    if (!(val instanceof Number)) {
-      try {
-        val = Long.parseLong(String.valueOf(val));
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Unsupported value type (not a number) for limit '" + limitName + "': " + val + " (" + val.getClass().getName() + ")");
-      }
+  public void setMaxSize(int maxSize) {
+    if (maxSize > 0) {
+      this.maxSize = maxSize;
+    } else {
+      this.maxSize = Integer.MAX_VALUE;
     }
-    Number value = (Number)val;
-    if (value.longValue() > Integer.MAX_VALUE) {
-      throw new IllegalArgumentException("Invalid new value for limit '" + limitName +"': " + value);
-    }
-    switch (limitName) {
-      case SIZE_PARAM:
-        if (value.intValue() > 0) {
-          sizeLimit = value.intValue();
-        } else {
-          sizeLimit = Integer.MAX_VALUE;
-        }
-        break;
-      case MAX_RAM_MB_PARAM:
-        if (value.intValue() > 0) {
-          maxRamBytes = value.intValue() * 1024L * 1024L;
-        } else {
-          maxRamBytes = Long.MAX_VALUE;
-        }
-        break;
-      default:
-        throw new IllegalArgumentException("Unsupported limit name '" + limitName + "'");
+    description = generateDescription();
+  }
+
+  @Override
+  public int getMaxRamMB() {
+    return maxRamBytes != Long.MAX_VALUE ? (int) (maxRamBytes / 1024L / 1024L) : -1;
+  }
+
+  @Override
+  public void setMaxRamMB(int maxRamMB) {
+    if (maxRamMB > 0) {
+      maxRamBytes = maxRamMB * 1024L * 1024L;
+    } else {
+      maxRamBytes = Long.MAX_VALUE;
     }
     description = generateDescription();
   }
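One behavioral detail in the LRUCache hunks above that the tests further down rely on: setMaxSize() only records the new limit and regenerates the description; entries are evicted lazily, inside the next put(), where removeEldestEntry() trims the map back under getMaxSize(). A minimal sketch (hypothetical calling code, assuming an LRUCache already initialized via init()):

    static void demoEvictionTiming(org.apache.solr.search.LRUCache<String, String> cache) {
      // Assume the cache currently holds 6 entries, as in TestLRUCache below.
      cache.setMaxSize(5);          // records the limit; nothing is evicted here
      cache.put("extra", "value");  // put() runs removeEldestEntry(), which
                                    // evicts until size() <= getMaxSize()
    }
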
solr/core/src/java/org/apache/solr/search/SolrCache.java
@@ -27,7 +27,14 @@ import java.util.Map;
  */
 public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
 
+  String HIT_RATIO_PARAM = "hitratio";
+  String HITS_PARAM = "hits";
+  String INSERTS_PARAM = "inserts";
+  String EVICTIONS_PARAM = "evictions";
+  String LOOKUPS_PARAM = "lookups";
   String SIZE_PARAM = "size";
+  String MAX_SIZE_PARAM = "maxSize";
+  String RAM_BYTES_USED_PARAM = "ramBytesUsed";
   String MAX_RAM_MB_PARAM = "maxRamMB";
 
   /**
@@ -129,20 +136,19 @@ public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
   /** Frees any non-memory resources */
   public void close();
 
-  /** Report current resource limits. */
-  public Map<String, Object> getResourceLimits();
+  /** Returns maximum size limit (number of items) if set and supported, -1 otherwise. */
+  int getMaxSize();
 
-  /** Set resource limits. */
-  default void setResourceLimits(Map<String, Object> limits) throws Exception {
-    if (limits == null || limits.isEmpty()) {
-      return;
-    }
-    for (Map.Entry<String, Object> entry : limits.entrySet()) {
-      setResourceLimit(entry.getKey(), entry.getValue());
-    }
-  }
+  /** Set maximum size limit (number of items), or -1 for unlimited. Note: this has effect
+   * only on implementations that support it, it's a no-op otherwise
+   */
+  void setMaxSize(int maxSize);
 
-  /** Set a named resource limit. */
-  public void setResourceLimit(String limitName, Object value) throws Exception;
+  /** Returns maximum size limit (in MB) if set and supported, -1 otherwise. */
+  int getMaxRamMB();
 
+  /** Set maximum size limit (in MB), or -1 for unlimited. Note: this has effect
+   * only on implementations that support it, it's a no-op otherwise.
+   */
+  void setMaxRamMB(int maxRamMB);
 }
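The interface hunk above is the heart of SOLR-13693: a string-keyed, Object-valued, exception-throwing limit API becomes two pairs of int-typed accessors, with -1 as the "unlimited or unsupported" sentinel. A hedged before/after sketch (the caller method is hypothetical; only the SolrCache calls shown in this diff are real):

    static void resizeForPressure(org.apache.solr.search.SolrCache<?, ?> cache) {
      // Before this commit (loosely typed, validated at runtime inside each cache):
      //   cache.setResourceLimit(SolrCache.SIZE_PARAM, 5);
      //   cache.setResourceLimit(SolrCache.MAX_RAM_MB_PARAM, 4);
      // After it, the compiler checks the argument types:
      cache.setMaxSize(5);
      cache.setMaxRamMB(4);             // a no-op on caches without a RAM limit
      int items = cache.getMaxSize();   // -1 if no item limit is in effect
      int mb = cache.getMaxRamMB();     // -1 if no RAM limit is in effect
    }
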
solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
@@ -81,16 +81,24 @@ public class SolrCacheHolder<K, V> implements SolrCache<K,V> {
   }
 
   @Override
-  public Map<String, Object> getResourceLimits() {
-    return delegate.getResourceLimits();
+  public int getMaxSize() {
+    return delegate.getMaxSize();
   }
 
   @Override
-  public void setResourceLimit(String limitName, Object value) throws Exception {
-    delegate.setResourceLimit(limitName, value);
+  public void setMaxSize(int maxSize) {
+    delegate.setMaxSize(maxSize);
 
   }
 
+  @Override
+  public int getMaxRamMB() {
+    return delegate.getMaxRamMB();
+  }
+
+  @Override
+  public void setMaxRamMB(int maxRamMB) {
+    delegate.setMaxRamMB(maxRamMB);
+  }
+
   public void warm(SolrIndexSearcher searcher, SolrCacheHolder src) {
     delegate.warm(searcher, src.get());
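SolrCacheHolder stays a pure pass-through: each typed accessor forwards to the wrapped cache, so code holding the wrapper never needs to know which implementation sits behind it. A minimal sketch of the same forwarding pattern (hypothetical class, not in the commit):

    // Hypothetical: any decorator over SolrCache only needs to forward the
    // four typed accessors to remain resize-transparent.
    abstract class DelegatingSolrCache<K, V> implements org.apache.solr.search.SolrCache<K, V> {
      protected abstract org.apache.solr.search.SolrCache<K, V> delegate();
      @Override public int getMaxSize() { return delegate().getMaxSize(); }
      @Override public void setMaxSize(int maxSize) { delegate().setMaxSize(maxSize); }
      @Override public int getMaxRamMB() { return delegate().getMaxRamMB(); }
      @Override public void setMaxRamMB(int maxRamMB) { delegate().setMaxRamMB(maxRamMB); }
      // ... remaining SolrCache methods forwarded the same way
    }
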
solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
@@ -346,7 +346,7 @@ public class TestFastLRUCache extends SolrTestCase {
     // no evictions yet
     assertEquals(6, cache.size());
     // this also sets minLimit = 4
-    cache.setResourceLimit(SolrCache.SIZE_PARAM, 5);
+    cache.setMaxSize(5);
     // should not happen yet - evictions are triggered by put
     assertEquals(6, cache.size());
     cache.put("6", new Accountable() {
@@ -359,7 +359,7 @@ public class TestFastLRUCache extends SolrTestCase {
     assertEquals(4, cache.size());
 
     // modify ram limit
-    cache.setResourceLimit(SolrCache.MAX_RAM_MB_PARAM, 3);
+    cache.setMaxRamMB(3);
     // should not happen yet - evictions are triggered by put
     assertEquals(4, cache.size());
     // this evicts down to 3MB * 0.8, ie. ramLowerWaterMark
@@ -376,7 +376,7 @@ public class TestFastLRUCache extends SolrTestCase {
 
     // scale up
 
-    cache.setResourceLimit(SolrCache.MAX_RAM_MB_PARAM, 4);
+    cache.setMaxRamMB(4);
     cache.put("8", new Accountable() {
       @Override
       public long ramBytesUsed() {
@@ -385,7 +385,7 @@ public class TestFastLRUCache extends SolrTestCase {
     });
     assertEquals(4, cache.size());
 
-    cache.setResourceLimit(SolrCache.SIZE_PARAM, 10);
+    cache.setMaxSize(10);
     for (int i = 0; i < 6; i++) {
       cache.put("new" + i, new Accountable() {
         @Override
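The test comment "this also sets minLimit = 4" follows from checkAndAdjustLimits() in the FastLRUCache hunk above: shrinking maxSize to or below the current lower watermark drags minSizeLimit down to maxSize - 1. A standalone sketch of that arithmetic (hypothetical starting values; logic copied from the commit):

    int maxSize = 5;        // the new limit from cache.setMaxSize(5)
    int minSizeLimit = 5;   // assumption: the previous lower watermark was >= 5
    if (minSizeLimit <= 0) minSizeLimit = 1;
    if (maxSize <= minSizeLimit) {
      if (maxSize > 1) {
        minSizeLimit = maxSize - 1;   // -> 4, matching the test comment
      } else {
        maxSize = minSizeLimit + 1;   // degenerate case: keep max above min
      }
    }
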
solr/core/src/test/org/apache/solr/search/TestLFUCache.java
@@ -472,7 +472,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
     // no evictions yet
     assertEquals(6, cache.size());
     // this sets minSize = 4, evictions will target minSize
-    cache.setResourceLimit(SolrCache.SIZE_PARAM, 5);
+    cache.setMaxSize(5);
     // should not happen yet - evictions are triggered by put
     assertEquals(6, cache.size());
     cache.put("6", "foo 6");
@@ -487,7 +487,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
 
     // scale up
 
-    cache.setResourceLimit(SolrCache.SIZE_PARAM, 10);
+    cache.setMaxSize(10);
     for (int i = 0; i < 6; i++) {
       cache.put("new" + i, "bar " + i);
     }
solr/core/src/test/org/apache/solr/search/TestLRUCache.java
@@ -206,7 +206,7 @@ public class TestLRUCache extends SolrTestCase {
     }
     // no evictions yet
     assertEquals(6, cache.size());
-    cache.setResourceLimit(SolrCache.SIZE_PARAM, 5);
+    cache.setMaxSize(5);
     // should not happen yet - evictions are triggered by put
     assertEquals(6, cache.size());
     cache.put("6", new Accountable() {
@@ -219,7 +219,7 @@ public class TestLRUCache extends SolrTestCase {
     assertEquals(5, cache.size());
 
     // modify ram limit
-    cache.setResourceLimit(SolrCache.MAX_RAM_MB_PARAM, 3);
+    cache.setMaxRamMB(3);
     // should not happen yet - evictions are triggered by put
     assertEquals(5, cache.size());
     cache.put("7", new Accountable() {
@@ -235,7 +235,7 @@ public class TestLRUCache extends SolrTestCase {
 
     // scale up
 
-    cache.setResourceLimit(SolrCache.MAX_RAM_MB_PARAM, 4);
+    cache.setMaxRamMB(4);
     cache.put("8", new Accountable() {
       @Override
       public long ramBytesUsed() {
@@ -244,7 +244,7 @@ public class TestLRUCache extends SolrTestCase {
     });
     assertEquals(4, cache.size());
 
-    cache.setResourceLimit(SolrCache.SIZE_PARAM, 10);
+    cache.setMaxSize(10);
     for (int i = 0; i < 6; i++) {
       cache.put("new" + i, new Accountable() {
         @Override