Internal: Upgrade Guava to 18.0.

Guava 17.0 and earlier were affected by the following bug:
https://code.google.com/p/guava-libraries/issues/detail?id=1761
which caused caches configured with a maximum weight greater than 32GB to
effectively be unbounded. This is fixed in 18.0.
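
For context, the failure mode can be sketched from the description in the removed ByteSizeValue javadoc further down: the cache's maximum weight is split across 16 segments and each segment tracks its weight in a signed 32-bit int, so a 32GB maximum pushes the per-segment share past Integer.MAX_VALUE. A minimal, hypothetical Java sketch of that arithmetic (not Guava's actual code):

    // Sketch only: the segment count and int-sized per-segment weight are taken
    // from the removed ByteSizeValue javadoc, not from Guava's implementation.
    public class WeightOverflowSketch {
        public static void main(String[] args) {
            long requestedMaxWeight = 32L * 1024 * 1024 * 1024; // 32GB
            int segments = 16;                                  // segment count per the old comment
            long perSegmentWeight = requestedMaxWeight / segments;
            // Prints true: the per-segment counter no longer fits in an int,
            // so size-based eviction silently stops working.
            System.out.println(perSegmentWeight > Integer.MAX_VALUE);
        }
    }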

Relates to #6268
Close #7593
Adrien Grand 2014-09-04 15:27:36 +02:00
parent c88f55ab52
commit b49853a619
6 changed files with 4 additions and 130 deletions


@@ -191,7 +191,7 @@
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>17.0</version>
+            <version>18.0</version>
             <scope>compile</scope>
         </dependency>


@@ -34,14 +34,6 @@ import java.util.Locale;
  *
  */
 public class ByteSizeValue implements Serializable, Streamable {
-    /**
-     * Largest size possible for Guava caches to prevent overflow. Guava's
-     * caches use integers to track weight per segment and we always use 16
-     * segments, so caches of 32GB would always overflow that integer and they'd
-     * never be evicted by size. We set this to 31.9GB, leaving 100MB of headroom
-     * to prevent overflow.
-     */
-    public static final ByteSizeValue MAX_GUAVA_CACHE_SIZE = new ByteSizeValue(32 * ByteSizeUnit.C3 - 100 * ByteSizeUnit.C2);
     private long size;
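
The removed constant worked out to roughly 31.9GB. A small standalone sketch of that arithmetic, assuming ByteSizeUnit.C2 and ByteSizeUnit.C3 are the usual megabyte and gigabyte multipliers in bytes:

    // Hypothetical reconstruction of the removed constant's value;
    // C2/C3 here mirror what ByteSizeUnit.C2/C3 are assumed to be.
    public class MaxGuavaCacheSizeSketch {
        static final long C2 = 1024L * 1024;        // bytes per MB
        static final long C3 = 1024L * 1024 * 1024; // bytes per GB

        public static void main(String[] args) {
            long maxBytes = 32 * C3 - 100 * C2; // 32GB minus 100MB of headroom
            System.out.println(maxBytes);       // 34254880768 bytes, ~31.9GB
        }
    }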


@@ -119,16 +119,7 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
     }
 
     private void computeSizeInBytes() {
-        long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size).bytes();
-        if (sizeInBytes > ByteSizeValue.MAX_GUAVA_CACHE_SIZE.bytes()) {
-            logger.warn("reducing requested filter cache size of [{}] to the maximum allowed size of [{}]", new ByteSizeValue(sizeInBytes),
-                    ByteSizeValue.MAX_GUAVA_CACHE_SIZE);
-            sizeInBytes = ByteSizeValue.MAX_GUAVA_CACHE_SIZE.bytes();
-            // Even though it feels wrong for size and sizeInBytes to get out of
-            // sync we don't update size here because it might cause the cache
-            // to be rebuilt every time new settings are applied.
-        }
-        this.sizeInBytes = sizeInBytes;
+        this.sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size).bytes();
     }
 
     public void addReaderKeyToClean(Object readerKey) {


@@ -38,7 +38,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.MemorySizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
@@ -118,13 +117,6 @@ public class IndicesQueryCache extends AbstractComponent implements RemovalListe
     private void buildCache() {
         long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size).bytes();
-        if (sizeInBytes > ByteSizeValue.MAX_GUAVA_CACHE_SIZE.bytes()) {
-            logger.warn("reducing requested query cache size of [{}] to the maximum allowed size of [{}]", new ByteSizeValue(sizeInBytes), ByteSizeValue.MAX_GUAVA_CACHE_SIZE);
-            sizeInBytes = ByteSizeValue.MAX_GUAVA_CACHE_SIZE.bytes();
-            // Even though it feels wrong for size and sizeInBytes to get out of
-            // sync we don't update size here because it might cause the cache
-            // to be rebuilt every time new settings are applied.
-        }
 
         CacheBuilder<Key, BytesReference> cacheBuilder = CacheBuilder.newBuilder()
                 .maximumWeight(sizeInBytes).weigher(new QueryCacheWeigher()).removalListener(this);


@@ -65,14 +65,8 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
         super(settings);
         this.threadPool = threadPool;
         this.indicesFieldDataCacheListener = indicesFieldDataCacheListener;
-        String size = componentSettings.get("size", "-1");
-        long sizeInBytes = componentSettings.getAsMemory("size", "-1").bytes();
-        if (sizeInBytes > ByteSizeValue.MAX_GUAVA_CACHE_SIZE.bytes()) {
-            logger.warn("reducing requested field data cache size of [{}] to the maximum allowed size of [{}]", new ByteSizeValue(sizeInBytes),
-                    ByteSizeValue.MAX_GUAVA_CACHE_SIZE);
-            sizeInBytes = ByteSizeValue.MAX_GUAVA_CACHE_SIZE.bytes();
-            size = ByteSizeValue.MAX_GUAVA_CACHE_SIZE.toString();
-        }
+        final String size = componentSettings.get("size", "-1");
+        final long sizeInBytes = componentSettings.getAsMemory("size", "-1").bytes();
         final TimeValue expire = componentSettings.getAsTime("expire", null);
         CacheBuilder<Key, Accountable> cacheBuilder = CacheBuilder.newBuilder()
                 .removalListener(this);


@@ -1,95 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.util;
-
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.Weigher;
-
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.test.ElasticsearchTestCase;
-import org.junit.Test;
-
-import static org.hamcrest.Matchers.*;
-
-/**
- * Asserts that Guava's caches can get stuck in an overflow state where they
- * never clear themselves based on their "weight" policy if the weight grows
- * beyond MAX_INT. If the noEvictionIf* methods start failing after upgrading
- * Guava then the problem with Guava's caches can probably be considered fixed
- * and {@code ByteSizeValue#MAX_GUAVA_CACHE_SIZE} can likely be removed.
- */
-public class GuavaCacheOverflowTest extends ElasticsearchTestCase {
-    private final int tenMeg = ByteSizeValue.parseBytesSizeValue("10MB").bytesAsInt();
-
-    private Cache<Integer, Object> cache;
-
-    @Test
-    public void noEvictionIfWeightMaxWeightIs32GB() {
-        checkNoEviction("32GB");
-    }
-
-    @Test
-    public void noEvictionIfWeightMaxWeightIsGreaterThan32GB() {
-        checkNoEviction(between(33, 50) + "GB");
-    }
-
-    @Test
-    public void evictionIfWeightSlowlyGoesOverMaxWeight() {
-        buildCache("30GB");
-        // Add about 100GB of weight to the cache
-        int entries = 10240;
-        fillCache(entries);
-
-        // And as expected, some are purged.
-        int missing = 0;
-        for (int i = 0; i < 31; i++) {
-            if (cache.getIfPresent(i + tenMeg) == null) {
-                missing++;
-            }
-        }
-        assertThat(missing, both(greaterThan(0)).and(lessThan(entries)));
-    }
-
-    private void buildCache(String size) {
-        cache = CacheBuilder.newBuilder().concurrencyLevel(16).maximumWeight(ByteSizeValue.parseBytesSizeValue(size).bytes())
-                .weigher(new Weigher<Integer, Object>() {
-                    @Override
-                    public int weigh(Integer key, Object value) {
-                        return key;
-                    }
-                }).build();
-    }
-
-    private void fillCache(int entries) {
-        for (int i = 0; i < entries; i++) {
-            cache.put(i + tenMeg, i);
-        }
-    }
-
-    private void checkNoEviction(String size) {
-        buildCache(size);
-        // Adds ~100GB worth of weight to the cache
-        fillCache(10240);
-
-        // But nothing has been purged!
-        for (int i = 0; i < 10000; i++) {
-            assertNotNull(cache.getIfPresent(i + tenMeg));
-        }
-    }
-}