Filter cache to have just weighted (node) and none, and index query parser cache to be size based, closes #1590.

Parent: 367ecceef6
Commit: a18021c778

Changed file: pom.xml (14 changed lines)
pom.xml
@@ -81,17 +81,10 @@
         </dependency>

         <!-- START: dependencies that are shaded -->
-        <dependency>
-            <groupId>com.googlecode.concurrentlinkedhashmap</groupId>
-            <artifactId>concurrentlinkedhashmap-lru</artifactId>
-            <version>1.2</version>
-            <scope>compile</scope>
-        </dependency>
-
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>10.0.1</version>
+            <version>11.0</version>
             <scope>compile</scope>
             <exclusions>
                 <exclusion>
@@ -315,7 +308,6 @@
                     <minimizeJar>true</minimizeJar>
                     <artifactSet>
                         <includes>
-                            <include>com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru</include>
                             <include>com.google.guava:guava</include>
                             <include>net.sf.trove4j:trove4j</include>
                             <include>org.elasticsearch:es-jsr166y</include>
@@ -330,10 +322,6 @@
                         </includes>
                     </artifactSet>
                     <relocations>
-                        <relocation>
-                            <pattern>com.googlecode.concurrentlinkedhashmap</pattern>
-                            <shadedPattern>org.elasticsearch.common.concurrentlinkedhashmap</shadedPattern>
-                        </relocation>
                         <relocation>
                             <pattern>com.google.common</pattern>
                             <shadedPattern>org.elasticsearch.common</shadedPattern>
CacheBuilderHelper.java (org.elasticsearch.common.cache, new file)
@@ -0,0 +1,34 @@
package org.elasticsearch.common.cache;

import com.google.common.cache.CacheBuilder;

import java.lang.reflect.Method;

/**
 */
public class CacheBuilderHelper {

    private static final Method cacheBuilderDisableStatsMethod;

    static {
        Method cacheBuilderDisableStatsMethodX = null;
        try {
            cacheBuilderDisableStatsMethodX = CacheBuilder.class.getDeclaredMethod("disableStats");
            cacheBuilderDisableStatsMethodX.setAccessible(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
        cacheBuilderDisableStatsMethod = cacheBuilderDisableStatsMethodX;
    }

    public static void disableStats(CacheBuilder cacheBuilder) {
        if (cacheBuilderDisableStatsMethod != null) {
            try {
                cacheBuilderDisableStatsMethod.invoke(cacheBuilder);
            } catch (Exception e) {
                // ignore
            }
        }
    }
}
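The helper above reaches for CacheBuilder's non-public disableStats() hook reflectively and silently degrades to a no-op when the hook is missing or inaccessible. A minimal sketch of how a caller might use it together with a bounded builder follows; the bound and the key/value types are illustrative, not taken from the commit.

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import org.elasticsearch.common.cache.CacheBuilderHelper;

    public class DisableStatsExample {
        public static void main(String[] args) {
            CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder()
                    .maximumSize(1000); // illustrative size bound
            // Best effort: turns stats accounting off when the non-public hook exists,
            // and silently does nothing otherwise.
            CacheBuilderHelper.disableStats(builder);
            Cache<String, String> cache = builder.build();
            cache.put("k", "v");
        }
    }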
BytecodeGen.java (org.elasticsearch.common.inject.internal) — entire 236-line file deleted
@@ -1,236 +0,0 @@
Removed in full: the Google-copyrighted Guice utility for runtime code generation and class
loading. It held the GUICE_CLASS_LOADER constant, the "guice.custom.loader" HOOK_ENABLED flag,
a weak MapMaker-backed cache of bridge class loaders, the canonicalize() /
getSystemClassLoaderOrNull() / getClassLoader(Class, ClassLoader) helpers, the Visibility enum
(PUBLIC / SAME_PACKAGE, with forMember() and forType()), and the BridgeClassLoader that
delegated Guice-internal and cglib package loads back to the Guice class loader.
ConstructionContext.java
@@ -77,7 +77,9 @@ public class ConstructionContext<T> {
                 = new DelegatingInvocationHandler<T>();
         invocationHandlers.add(invocationHandler);

-        ClassLoader classLoader = BytecodeGen.getClassLoader(expectedType);
+        // ES: Replace, since we don't use bytecode gen, just get the type class loader, or system if its null
+        //ClassLoader classLoader = BytecodeGen.getClassLoader(expectedType);
+        ClassLoader classLoader = expectedType.getClassLoader() == null ? ClassLoader.getSystemClassLoader() : expectedType.getClassLoader();
         return expectedType.cast(Proxy.newProxyInstance(classLoader,
                 new Class[]{expectedType}, invocationHandler));
     }
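The ConstructionContext change swaps Guice's BytecodeGen class-loader selection for a plain java.lang.reflect.Proxy using the type's own class loader (or the system loader when that is null). A small standalone sketch of the same proxy construction, with a hypothetical Greeter interface and handler:

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;

    public class ProxyLoaderExample {
        interface Greeter {
            String greet(String name);
        }

        public static void main(String[] args) {
            Class<Greeter> type = Greeter.class;
            // Same fallback as in the diff: prefer the type's loader, fall back to the
            // system class loader when the type came from the bootstrap loader.
            ClassLoader loader = type.getClassLoader() == null
                    ? ClassLoader.getSystemClassLoader() : type.getClassLoader();
            InvocationHandler handler = new InvocationHandler() {
                @Override
                public Object invoke(Object proxy, Method method, Object[] methodArgs) {
                    return "hello " + methodArgs[0];
                }
            };
            Greeter greeter = type.cast(Proxy.newProxyInstance(loader, new Class[]{type}, handler));
            System.out.println(greeter.greet("world"));
        }
    }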
Modules.java
@@ -82,7 +82,7 @@ public final class Modules {
      * Returns a new module that installs all of {@code modules}.
      */
     public static Module combine(Module... modules) {
-        return combine(ImmutableSet.of(modules));
+        return combine(ImmutableSet.copyOf(modules));
     }

     /**
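For the combine(...) fix, the relevant behavior is that ImmutableSet.copyOf(array) produces a set of the array's elements, which is what the vararg parameter needs; a tiny illustration with plain strings (not from the commit):

    import com.google.common.collect.ImmutableSet;

    public class CopyOfExample {
        public static void main(String[] args) {
            String[] modules = {"a", "b", "c"};
            // copyOf expands the incoming array into its elements
            ImmutableSet<String> set = ImmutableSet.copyOf(modules);
            System.out.println(set); // [a, b, c]
        }
    }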
ResidentFieldDataCache.java (org.elasticsearch.index.cache.field.data.resident)
@@ -20,11 +20,13 @@
 package org.elasticsearch.index.cache.field.data.resident;

 import com.google.common.base.Objects;
-import com.google.common.collect.MapEvictionListener;
-import com.google.common.collect.MapMaker;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.cache.CacheBuilderHelper;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.settings.Settings;
@@ -35,13 +37,12 @@ import org.elasticsearch.index.field.data.FieldData;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.index.settings.IndexSettingsService;

-import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.TimeUnit;

 /**
  *
  */
-public class ResidentFieldDataCache extends AbstractConcurrentMapFieldDataCache implements MapEvictionListener<String, FieldData> {
+public class ResidentFieldDataCache extends AbstractConcurrentMapFieldDataCache implements RemovalListener<String, FieldData> {

     private final IndexSettingsService indexSettingsService;

@@ -71,16 +72,16 @@ public class ResidentFieldDataCache extends AbstractConcurrentMapFieldDataCache
     }

     @Override
-    protected ConcurrentMap<String, FieldData> buildFieldDataMap() {
-        MapMaker mapMaker = new MapMaker();
+    protected Cache<String, FieldData> buildFieldDataMap() {
+        CacheBuilder<String, FieldData> cacheBuilder = CacheBuilder.newBuilder().removalListener(this);
         if (maxSize != -1) {
-            mapMaker.maximumSize(maxSize);
+            cacheBuilder.maximumSize(maxSize);
         }
         if (expire != null) {
-            mapMaker.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
+            cacheBuilder.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
         }
-        mapMaker.evictionListener(this);
-        return mapMaker.makeMap();
+        CacheBuilderHelper.disableStats(cacheBuilder);
+        return cacheBuilder.build();
     }

     @Override
@@ -94,8 +95,10 @@ public class ResidentFieldDataCache extends AbstractConcurrentMapFieldDataCache
     }

     @Override
-    public void onEviction(@Nullable String s, @Nullable FieldData fieldData) {
-        evictions.inc();
+    public void onRemoval(RemovalNotification<String, FieldData> removalNotification) {
+        if (removalNotification.wasEvicted()) {
+            evictions.inc();
+        }
     }

     static {
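The buildFieldDataMap() rewrite above is the recurring migration in this commit: MapMaker's eviction features are replaced by Guava's CacheBuilder with an explicit RemovalListener. A self-contained sketch of the same construction outside Elasticsearch; the bounds, types, and listener body are illustrative:

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.RemovalListener;
    import com.google.common.cache.RemovalNotification;
    import java.util.concurrent.TimeUnit;

    public class BoundedCacheExample {
        public static void main(String[] args) {
            RemovalListener<String, String> listener = new RemovalListener<String, String>() {
                @Override
                public void onRemoval(RemovalNotification<String, String> notification) {
                    // wasEvicted() is true for size- or time-based removal,
                    // false for explicit invalidation or replacement
                    System.out.println(notification.getKey() + " removed, evicted=" + notification.wasEvicted());
                }
            };
            Cache<String, String> cache = CacheBuilder.newBuilder()
                    .maximumSize(100)                        // size bound, as in the field data cache
                    .expireAfterAccess(10, TimeUnit.MINUTES) // optional expiry
                    .removalListener(listener)
                    .build();
            cache.put("field", "data");
            cache.invalidate("field"); // explicit removal -> wasEvicted() == false
        }
    }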
SoftFieldDataCache.java (org.elasticsearch.index.cache.field.data.soft)
@@ -19,9 +19,11 @@

 package org.elasticsearch.index.cache.field.data.soft;

-import com.google.common.collect.MapEvictionListener;
-import com.google.common.collect.MapMaker;
-import org.elasticsearch.common.Nullable;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
+import org.elasticsearch.common.cache.CacheBuilderHelper;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.settings.Settings;
@@ -30,12 +32,10 @@ import org.elasticsearch.index.cache.field.data.support.AbstractConcurrentMapFie
 import org.elasticsearch.index.field.data.FieldData;
 import org.elasticsearch.index.settings.IndexSettings;

-import java.util.concurrent.ConcurrentMap;
-
 /**
  *
  */
-public class SoftFieldDataCache extends AbstractConcurrentMapFieldDataCache implements MapEvictionListener<String, FieldData> {
+public class SoftFieldDataCache extends AbstractConcurrentMapFieldDataCache implements RemovalListener<String, FieldData> {

     private final CounterMetric evictions = new CounterMetric();

@@ -45,8 +45,10 @@ public class SoftFieldDataCache extends AbstractConcurrentMapFieldDataCache impl
     }

     @Override
-    protected ConcurrentMap<String, FieldData> buildFieldDataMap() {
-        return new MapMaker().softValues().evictionListener(this).makeMap();
+    protected Cache<String, FieldData> buildFieldDataMap() {
+        CacheBuilder<String, FieldData> cacheBuilder = CacheBuilder.newBuilder().softValues().removalListener(this);
+        CacheBuilderHelper.disableStats(cacheBuilder);
+        return cacheBuilder.build();
     }

     @Override
@@ -60,7 +62,9 @@ public class SoftFieldDataCache extends AbstractConcurrentMapFieldDataCache impl
     }

     @Override
-    public void onEviction(@Nullable String s, @Nullable FieldData fieldData) {
-        evictions.inc();
+    public void onRemoval(RemovalNotification<String, FieldData> removalNotification) {
+        if (removalNotification.wasEvicted()) {
+            evictions.inc();
+        }
     }
 }
AbstractConcurrentMapFieldDataCache.java (org.elasticsearch.index.cache.field.data.support)
@@ -19,6 +19,7 @@

 package org.elasticsearch.index.cache.field.data.support;

+import com.google.common.cache.Cache;
 import org.apache.lucene.index.IndexReader;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.common.settings.Settings;
@@ -32,7 +33,6 @@ import org.elasticsearch.index.settings.IndexSettings;

 import java.io.IOException;
 import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;

 /**
@@ -40,15 +40,13 @@ import java.util.concurrent.ConcurrentMap;
  */
 public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexComponent implements FieldDataCache, IndexReader.ReaderFinishedListener {

-    private final ConcurrentMap<Object, ConcurrentMap<String, FieldData>> cache;
+    private final ConcurrentMap<Object, Cache<String, FieldData>> cache;

     private final Object creationMutex = new Object();

     protected AbstractConcurrentMapFieldDataCache(Index index, @IndexSettings Settings indexSettings) {
         super(index, indexSettings);
-        // weak keys is fine, it will only be cleared once IndexReader references will be removed
-        // (assuming clear(...) will not be called)
-        this.cache = new ConcurrentHashMap<Object, ConcurrentMap<String, FieldData>>();
+        this.cache = ConcurrentCollections.newConcurrentMap();
     }

     @Override
@@ -58,8 +56,8 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC

     @Override
     public void clear(String fieldName) {
-        for (Map.Entry<Object, ConcurrentMap<String, FieldData>> entry : cache.entrySet()) {
-            entry.getValue().remove(fieldName);
+        for (Map.Entry<Object, Cache<String, FieldData>> entry : cache.entrySet()) {
+            entry.getValue().invalidate(fieldName);
         }
     }

@@ -75,19 +73,15 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC

     @Override
     public void clear(IndexReader reader) {
-        ConcurrentMap<String, FieldData> map = cache.remove(reader.getCoreCacheKey());
-        // help soft/weak handling GC
-        if (map != null) {
-            map.clear();
-        }
+        cache.remove(reader.getCoreCacheKey());
     }

     @Override
     public long sizeInBytes() {
         // the overhead of the map is not really relevant...
         long sizeInBytes = 0;
-        for (ConcurrentMap<String, FieldData> map : cache.values()) {
-            for (FieldData fieldData : map.values()) {
+        for (Cache<String, FieldData> map : cache.values()) {
+            for (FieldData fieldData : map.asMap().values()) {
                 sizeInBytes += fieldData.sizeInBytes();
             }
         }
@@ -97,8 +91,8 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC
     @Override
     public long sizeInBytes(String fieldName) {
         long sizeInBytes = 0;
-        for (ConcurrentMap<String, FieldData> map : cache.values()) {
-            FieldData fieldData = map.get(fieldName);
+        for (Cache<String, FieldData> map : cache.values()) {
+            FieldData fieldData = map.getIfPresent(fieldName);
             if (fieldData != null) {
                 sizeInBytes += fieldData.sizeInBytes();
             }
@@ -108,7 +102,7 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC

     @Override
     public FieldData cache(FieldDataType type, IndexReader reader, String fieldName) throws IOException {
-        ConcurrentMap<String, FieldData> fieldDataCache = cache.get(reader.getCoreCacheKey());
+        Cache<String, FieldData> fieldDataCache = cache.get(reader.getCoreCacheKey());
         if (fieldDataCache == null) {
             synchronized (creationMutex) {
                 fieldDataCache = cache.get(reader.getCoreCacheKey());
@@ -119,10 +113,10 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC
                 }
             }
         }
-        FieldData fieldData = fieldDataCache.get(fieldName);
+        FieldData fieldData = fieldDataCache.getIfPresent(fieldName);
         if (fieldData == null) {
             synchronized (fieldDataCache) {
-                fieldData = fieldDataCache.get(fieldName);
+                fieldData = fieldDataCache.getIfPresent(fieldName);
                 if (fieldData == null) {
                     fieldData = FieldData.load(type, reader, fieldName);
                     fieldDataCache.put(fieldName, fieldData);
@@ -132,7 +126,5 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC
         return fieldData;
     }

-    protected ConcurrentMap<String, FieldData> buildFieldDataMap() {
-        return ConcurrentCollections.newConcurrentMap();
-    }
+    protected abstract Cache<String, FieldData> buildFieldDataMap();
 }
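With the field data cache now holding Guava Cache instances instead of ConcurrentMaps, the access calls map one-for-one as in the hunks above; a compact standalone sketch of that mapping (types illustrative):

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;

    public class CacheAccessExample {
        public static void main(String[] args) {
            Cache<String, Long> cache = CacheBuilder.newBuilder().build();
            cache.put("field", 42L);                // was map.put(key, value)
            Long hit = cache.getIfPresent("field"); // was map.get(key); null when absent
            long entries = cache.asMap().size();    // asMap() exposes a ConcurrentMap view for iteration
            cache.invalidate("field");              // was map.remove(key)
            System.out.println(hit + " / " + entries);
        }
    }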
WeakFieldDataCache.java (org.elasticsearch.index.cache.field.data.weak) — file deleted
@@ -1,66 +0,0 @@
[Apache License 2.0 header (ElasticSearch / Shay Banon)]

package org.elasticsearch.index.cache.field.data.weak;

import com.google.common.collect.MapEvictionListener;
import com.google.common.collect.MapMaker;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.data.support.AbstractConcurrentMapFieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.settings.IndexSettings;

import java.util.concurrent.ConcurrentMap;

/**
 *
 */
public class WeakFieldDataCache extends AbstractConcurrentMapFieldDataCache implements MapEvictionListener<String, FieldData> {

    private final CounterMetric evictions = new CounterMetric();

    @Inject
    public WeakFieldDataCache(Index index, @IndexSettings Settings indexSettings) {
        super(index, indexSettings);
    }

    @Override
    protected ConcurrentMap<String, FieldData> buildFieldDataMap() {
        return new MapMaker().weakValues().evictionListener(this).makeMap();
    }

    @Override
    public String type() {
        return "weak";
    }

    @Override
    public long evictions() {
        return evictions.count();
    }

    @Override
    public void onEviction(@Nullable String s, @Nullable FieldData fieldData) {
        evictions.inc();
    }
}
FilterCacheModule.java (org.elasticsearch.index.cache.filter)
@@ -22,7 +22,7 @@ package org.elasticsearch.index.cache.filter;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.Scopes;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.cache.filter.node.NodeFilterCache;
+import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache;

 /**
  *
@@ -42,7 +42,7 @@ public class FilterCacheModule extends AbstractModule {
     @Override
     protected void configure() {
         bind(FilterCache.class)
-                .to(settings.getAsClass(FilterCacheSettings.FILTER_CACHE_TYPE, NodeFilterCache.class, "org.elasticsearch.index.cache.filter.", "FilterCache"))
+                .to(settings.getAsClass(FilterCacheSettings.FILTER_CACHE_TYPE, WeightedFilterCache.class, "org.elasticsearch.index.cache.filter.", "FilterCache"))
                 .in(Scopes.SINGLETON);
     }
 }
NodeFilterCache.java (org.elasticsearch.index.cache.filter.node) — file deleted
@@ -1,61 +0,0 @@
[Apache License 2.0 header (ElasticSearch / Shay Banon)]

package org.elasticsearch.index.cache.filter.node;

import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.filter.support.AbstractWeightedFilterCache;
import org.elasticsearch.index.cache.filter.support.FilterCacheValue;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.cache.filter.IndicesNodeFilterCache;

import java.util.concurrent.ConcurrentMap;

public class NodeFilterCache extends AbstractWeightedFilterCache {

    private final IndicesNodeFilterCache indicesNodeFilterCache;

    @Inject
    public NodeFilterCache(Index index, @IndexSettings Settings indexSettings, IndicesNodeFilterCache indicesNodeFilterCache) {
        super(index, indexSettings);
        this.indicesNodeFilterCache = indicesNodeFilterCache;

        indicesNodeFilterCache.addEvictionListener(this);
    }

    @Override
    public void close() throws ElasticSearchException {
        indicesNodeFilterCache.removeEvictionListener(this);
        super.close();
    }

    @Override
    protected ConcurrentMap<FilterCacheKey, FilterCacheValue<DocSet>> cache() {
        return indicesNodeFilterCache.cache();
    }

    @Override
    public String type() {
        return "node";
    }
}
ResidentFilterCache.java (org.elasticsearch.index.cache.filter.resident) — entire 129-line file deleted
@@ -1,129 +0,0 @@
Removed in full: the resident (strong-reference) filter cache, documented as "A resident reference
based filter cache that has weak keys on the <tt>IndexReader</tt>". It extended
AbstractConcurrentMapFilterCache and implemented MapEvictionListener<Filter, DocSet>, built its
per-reader map with MapMaker using index.cache.filter.max_size (default 1000) as maximumSize and
index.cache.filter.expire as expireAfterAccess, registered itself as the eviction listener to count
evictions, reported type() as "resident", reacted to setting updates through an inner ApplySettings
IndexSettingsService listener (clearing the cache when max_size or expire changed), and statically
registered the "index.cache.field.max_size" / "index.cache.field.expire" dynamic settings.
SoftFilterCache.java (org.elasticsearch.index.cache.filter.soft) — entire 131-line file deleted
@@ -1,131 +0,0 @@
Removed in full: the soft-reference filter cache ("A soft reference based filter cache that has
soft keys on the <tt>IndexReader</tt>"). It extended AbstractConcurrentMapFilterCache and
implemented MapEvictionListener<Filter, DocSet>, built its per-reader map with
MapMaker().softValues() (noting that "DocSet are not really stored with strong reference only when
searching on them... Filter might be stored in query cache"), applied index.cache.filter.max_size
(default -1) and index.cache.filter.expire when set, counted evictions, reported type() as "soft",
listened for setting updates via an inner ApplySettings listener that cleared the cache on change,
and statically registered the "index.cache.field.max_size" / "index.cache.field.expire" dynamic settings.
AbstractConcurrentMapFilterCache.java (org.elasticsearch.index.cache.filter.support) — entire 176-line file deleted
@@ -1,176 +0,0 @@
Removed in full: "A base concurrent filter cache that accepts the actual cache to use." It kept a
ConcurrentMap of per-reader FilterCacheValue<ConcurrentMap<Object, DocSet>> entries, provided the
buildCache()/buildFilterMap() hooks, clear()/close()/finished(IndexReader) cleanup, entriesStats()
aggregation over the cached DocSets, cache(Filter)/isCached(Filter), and the inner
FilterCacheFilterWrapper whose getDocIdSet() registered the reader-finished listener, keyed entries
by CacheKeyFilter.cacheKey() when available, computed DocSets via FilterCacheValue.cacheable(),
used putIfAbsent plus the DocSet.EMPTY_DOC_SET sentinel, and carried the "LUCENE MONITOR" note
about CachingWrapperFilter and LUCENE-2468.
WeakFilterCache.java (org.elasticsearch.index.cache.filter.weak) — entire 129-line file deleted
@@ -1,129 +0,0 @@
Removed in full: the weak-reference filter cache ("A weak reference based filter cache that has
weak keys on the <tt>IndexReader</tt>"). It extended AbstractConcurrentMapFilterCache and
implemented MapEvictionListener<Filter, DocSet>, built its per-reader map with
MapMaker().weakValues(), applied index.cache.filter.max_size (default -1) and
index.cache.filter.expire when set, counted evictions, reported type() as "weak", listened for
setting updates via an inner ApplySettings listener that cleared the cache on change, and statically
registered the "index.cache.field.max_size" / "index.cache.field.expire" dynamic settings.
AbstractWeightedFilterCache.java → WeightedFilterCache.java (org.elasticsearch.index.cache.filter.weighted)
@@ -17,14 +17,17 @@
  * under the License.
  */

-package org.elasticsearch.index.cache.filter.support;
+package org.elasticsearch.index.cache.filter.weighted;

-import com.googlecode.concurrentlinkedhashmap.EvictionListener;
-import com.googlecode.concurrentlinkedhashmap.Weigher;
+import com.google.common.cache.Cache;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
+import com.google.common.cache.Weigher;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.docset.DocSet;
 import org.elasticsearch.common.lucene.search.NoCacheFilter;
 import org.elasticsearch.common.metrics.CounterMetric;
@@ -34,12 +37,17 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.filter.FilterCache;
+import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
+import org.elasticsearch.index.cache.filter.support.FilterCacheValue;
 import org.elasticsearch.index.settings.IndexSettings;
+import org.elasticsearch.indices.cache.filter.IndicesFilterCache;

 import java.io.IOException;
 import java.util.concurrent.ConcurrentMap;

-public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent implements FilterCache, IndexReader.ReaderFinishedListener, EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> {
+public class WeightedFilterCache extends AbstractIndexComponent implements FilterCache, IndexReader.ReaderFinishedListener, RemovalListener<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> {
+
+    final IndicesFilterCache indicesFilterCache;

     final ConcurrentMap<Object, Boolean> seenReaders = ConcurrentCollections.newConcurrentMap();
     final CounterMetric seenReadersCount = new CounterMetric();
@@ -47,15 +55,22 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
     final CounterMetric evictionsMetric = new CounterMetric();
     final MeanMetric totalMetric = new MeanMetric();

-    protected AbstractWeightedFilterCache(Index index, @IndexSettings Settings indexSettings) {
+    @Inject
+    public WeightedFilterCache(Index index, @IndexSettings Settings indexSettings, IndicesFilterCache indicesFilterCache) {
         super(index, indexSettings);
+        this.indicesFilterCache = indicesFilterCache;
+        indicesFilterCache.addRemovalListener(index.name(), this);
     }

-    protected abstract ConcurrentMap<FilterCacheKey, FilterCacheValue<DocSet>> cache();
+    @Override
+    public String type() {
+        return "weighted";
+    }

     @Override
     public void close() throws ElasticSearchException {
         clear();
+        indicesFilterCache.removeRemovalListener(index.name());
     }

     @Override
@@ -66,13 +81,10 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
             return;
         }
         seenReadersCount.dec();
-        ConcurrentMap<FilterCacheKey, FilterCacheValue<DocSet>> cache = cache();
-        for (FilterCacheKey key : cache.keySet()) {
+        for (FilterCacheKey key : indicesFilterCache.cache().asMap().keySet()) {
             if (key.readerKey() == readerKey) {
-                FilterCacheValue<DocSet> removed2 = cache.remove(key);
-                if (removed2 != null) {
-                    totalMetric.dec(removed2.value().sizeInBytes());
-                }
+                // invalidate will cause a removal and will be notified
+                indicesFilterCache.cache().invalidate(key);
             }
         }
     }
@@ -92,13 +104,11 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
             return;
         }
         seenReadersCount.dec();
-        ConcurrentMap<FilterCacheKey, FilterCacheValue<DocSet>> cache = cache();
-        for (FilterCacheKey key : cache.keySet()) {
+        Cache<FilterCacheKey, FilterCacheValue<DocSet>> cache = indicesFilterCache.cache();
+        for (FilterCacheKey key : cache.asMap().keySet()) {
             if (key.readerKey() == reader.getCoreCacheKey()) {
-                FilterCacheValue<DocSet> removed2 = cache.remove(key);
-                if (removed2 != null) {
-                    totalMetric.dec(removed2.value().sizeInBytes());
-                }
+                // invalidate will cause a removal and will be notified
+                cache.invalidate(key);
             }
         }
     }
@@ -134,9 +144,9 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent

         private final Filter filter;

-        private final AbstractWeightedFilterCache cache;
+        private final WeightedFilterCache cache;

-        FilterCacheFilterWrapper(Filter filter, AbstractWeightedFilterCache cache) {
+        FilterCacheFilterWrapper(Filter filter, WeightedFilterCache cache) {
             this.filter = filter;
             this.cache = cache;
         }
@@ -147,10 +157,10 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
             if (filter instanceof CacheKeyFilter) {
                 filterKey = ((CacheKeyFilter) filter).cacheKey();
             }
-            FilterCacheKey cacheKey = new FilterCacheKey(reader.getCoreCacheKey(), filterKey);
-            ConcurrentMap<FilterCacheKey, FilterCacheValue<DocSet>> innerCache = cache.cache();
+            FilterCacheKey cacheKey = new FilterCacheKey(cache.index().name(), reader.getCoreCacheKey(), filterKey);
+            Cache<FilterCacheKey, FilterCacheValue<DocSet>> innerCache = cache.indicesFilterCache.cache();

-            FilterCacheValue<DocSet> cacheValue = innerCache.get(cacheKey);
+            FilterCacheValue<DocSet> cacheValue = innerCache.getIfPresent(cacheKey);
             if (cacheValue == null) {
                 if (!cache.seenReaders.containsKey(reader.getCoreCacheKey())) {
                     Boolean previous = cache.seenReaders.putIfAbsent(reader.getCoreCacheKey(), Boolean.TRUE);
@@ -163,10 +173,10 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
                 DocIdSet docIdSet = filter.getDocIdSet(reader);
                 DocSet docSet = FilterCacheValue.cacheable(reader, docIdSet);
                 cacheValue = new FilterCacheValue<DocSet>(docSet);
-                FilterCacheValue<DocSet> previous = innerCache.putIfAbsent(cacheKey, cacheValue);
-                if (previous == null) {
-                    cache.totalMetric.inc(cacheValue.value().sizeInBytes());
-                }
+                // we might put the same one concurrently, that's fine, it will be replaced and the removal
+                // will be called
+                cache.totalMetric.inc(cacheValue.value().sizeInBytes());
+                innerCache.put(cacheKey, cacheValue);
             }

             return cacheValue.value() == DocSet.EMPTY_DOC_SET ? null : cacheValue.value();
@@ -187,39 +197,42 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
         }
     }


-    // factored by 10
-    public static class FilterCacheValueWeigher implements Weigher<FilterCacheValue<DocSet>> {
-
-        public static final long FACTOR = 10l;
+    public static class FilterCacheValueWeigher implements Weigher<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> {

         @Override
-        public int weightOf(FilterCacheValue<DocSet> value) {
-            int weight = (int) Math.min(value.value().sizeInBytes() / 10, Integer.MAX_VALUE);
+        public int weigh(FilterCacheKey key, FilterCacheValue<DocSet> value) {
+            int weight = (int) Math.min(value.value().sizeInBytes(), Integer.MAX_VALUE);
             return weight == 0 ? 1 : weight;
         }
     }

+    // this will only be called for our index / data, IndicesFilterCache makes sure it works like this based on the
+    // index we register the listener with
     @Override
-    public void onEviction(FilterCacheKey filterCacheKey, FilterCacheValue<DocSet> docSetFilterCacheValue) {
-        if (filterCacheKey != null) {
-            if (seenReaders.containsKey(filterCacheKey.readerKey())) {
-                evictionsMetric.inc();
-                if (docSetFilterCacheValue != null) {
-                    totalMetric.dec(docSetFilterCacheValue.value().sizeInBytes());
-                }
-            }
+    public void onRemoval(RemovalNotification<FilterCacheKey, FilterCacheValue<DocSet>> removalNotification) {
+        if (removalNotification.wasEvicted()) {
+            evictionsMetric.inc();
+        }
+        if (removalNotification.getValue() != null) {
+            totalMetric.dec(removalNotification.getValue().value().sizeInBytes());
+        }
     }

     public static class FilterCacheKey {
+        private final String index;
         private final Object readerKey;
|
||||
private final Object filterKey;
|
||||
|
||||
public FilterCacheKey(Object readerKey, Object filterKey) {
|
||||
public FilterCacheKey(String index, Object readerKey, Object filterKey) {
|
||||
this.index = index;
|
||||
this.readerKey = readerKey;
|
||||
this.filterKey = filterKey;
|
||||
}
|
||||
|
||||
public String index() {
|
||||
return index;
|
||||
}
|
||||
|
||||
public Object readerKey() {
|
||||
return readerKey;
|
||||
}
|
|
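The weigher, removal listener, and per-index FilterCacheKey above are the standard Guava cache building blocks this change adopts. As a reference for the pattern only (not code from the commit; the class and key names below are made up), a minimal weight-bounded Guava cache with a removal listener looks roughly like this:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.cache.Weigher;

public class WeightBoundedCacheSketch {

    public static void main(String[] args) {
        // Bound the cache by the total byte size of the cached values rather than by entry count,
        // mirroring how the filter cache charges each DocSet by sizeInBytes().
        Cache<String, byte[]> cache = CacheBuilder.newBuilder()
                .maximumWeight(1024)                      // total weight budget, here in "bytes"
                .weigher(new Weigher<String, byte[]>() {
                    @Override
                    public int weigh(String key, byte[] value) {
                        return Math.max(1, value.length); // never return 0, like the weigher above
                    }
                })
                .removalListener(new RemovalListener<String, byte[]>() {
                    @Override
                    public void onRemoval(RemovalNotification<String, byte[]> notification) {
                        // wasEvicted() distinguishes size/expiry evictions from explicit invalidate()
                        System.out.println(notification.getKey() + " removed, evicted=" + notification.wasEvicted());
                    }
                })
                .build();

        cache.put("a", new byte[600]);
        cache.put("b", new byte[600]);   // pushes the total weight over 1024, so "a" is likely evicted
        System.out.println("a present: " + (cache.getIfPresent("a") != null));
    }
}

The removal listener is notified for every removal, which is why the filter cache above checks wasEvicted() before counting an eviction, and decrements its size metric for any removed value.
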
@@ -22,7 +22,7 @@ package org.elasticsearch.index.cache.query.parser;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.Scopes;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.cache.query.parser.weak.WeakQueryParserCache;
+import org.elasticsearch.index.cache.query.parser.resident.ResidentQueryParserCache;

 /**
  *
@@ -38,7 +38,7 @@ public class QueryParserCacheModule extends AbstractModule {
     @Override
     protected void configure() {
         bind(QueryParserCache.class)
-                .to(settings.getAsClass("index.cache.query.parser.type", WeakQueryParserCache.class, "org.elasticsearch.index.cache.query.parser.", "QueryParserCache"))
+                .to(settings.getAsClass("index.cache.query.parser.type", ResidentQueryParserCache.class, "org.elasticsearch.index.cache.query.parser.", "QueryParserCache"))
                 .in(Scopes.SINGLETON);
     }
 }

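The binding above keeps the implementation pluggable per index: `index.cache.query.parser.type` resolves a class, with the resident cache now the default instead of the weak one. A hedged sketch of setting that key explicitly (the settings key is from the diff; the ImmutableSettings builder shown here is the usual settings API of that era, recalled from memory rather than taken from this commit):

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class QueryParserCacheSettingSketch {
    public static void main(String[] args) {
        // Explicitly pins the (new) default; a fully qualified class name would also be accepted.
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("index.cache.query.parser.type", "resident")
                .build();
        System.out.println(settings.get("index.cache.query.parser.type"));
    }
}
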
src/main/java/org/elasticsearch/index/cache/query/parser/resident/ResidentQueryParserCache.java (new file, 86 lines)

@@ -0,0 +1,86 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.cache.query.parser.resident;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.lucene.queryParser.QueryParserSettings;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.cache.CacheBuilderHelper;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.parser.QueryParserCache;
import org.elasticsearch.index.settings.IndexSettings;

import java.util.concurrent.TimeUnit;

/**
 * A small (by default) query parser cache mainly to not parse the same query string several times
 * if several shards exists on the same node.
 */
public class ResidentQueryParserCache extends AbstractIndexComponent implements QueryParserCache {

    private final Cache<QueryParserSettings, Query> cache;

    private volatile int maxSize;
    private volatile TimeValue expire;

    @Inject
    public ResidentQueryParserCache(Index index, @IndexSettings Settings indexSettings) {
        super(index, indexSettings);

        this.maxSize = indexSettings.getAsInt("index.cache.field.max_size", componentSettings.getAsInt("max_size", 100));
        this.expire = indexSettings.getAsTime("index.cache.field.expire", componentSettings.getAsTime("expire", null));
        logger.debug("using [resident] query cache with max_size [{}], expire [{}]", maxSize, expire);

        CacheBuilder cacheBuilder = CacheBuilder.newBuilder().maximumSize(maxSize);
        if (expire != null) {
            cacheBuilder.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
        }

        CacheBuilderHelper.disableStats(cacheBuilder);

        this.cache = cacheBuilder.build();
    }

    @Override
    public Query get(QueryParserSettings queryString) {
        return cache.getIfPresent(queryString);
    }

    @Override
    public void put(QueryParserSettings queryString, Query query) {
        cache.put(queryString, query);
    }

    @Override
    public void clear() {
        cache.invalidateAll();
    }

    @Override
    public void close() throws ElasticSearchException {
        cache.invalidateAll();
    }
}

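The resident cache above is simply a size-bounded (and optionally idle-time-bounded) Guava cache keyed by the parsed query settings. A rough standalone sketch of the same get-or-parse flow, with a plain String standing in for QueryParserSettings and the parsed Query (illustrative only, not the commit's API):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import java.util.concurrent.TimeUnit;

public class QueryParseCacheSketch {

    // Bounded by entry count and by idle time, like the resident query parser cache.
    private final Cache<String, String> parsed = CacheBuilder.newBuilder()
            .maximumSize(100)
            .expireAfterAccess(5, TimeUnit.MINUTES)
            .build();

    public String parse(String queryString) {
        String query = parsed.getIfPresent(queryString);
        if (query == null) {
            query = "PARSED[" + queryString + "]"; // stand-in for the actual Lucene query parsing
            parsed.put(queryString, query);        // racing puts are fine, last one wins
        }
        return query;
    }

    public static void main(String[] args) {
        QueryParseCacheSketch cache = new QueryParseCacheSketch();
        System.out.println(cache.parse("user:kimchy")); // parses and caches
        System.out.println(cache.parse("user:kimchy")); // served from the cache
    }
}

Keeping the plain getIfPresent/put flow rather than a loading cache means two concurrent misses may parse the same string twice, which is harmless for this use case.
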
(deleted file, 40 lines)

@@ -1,40 +0,0 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.cache.query.parser.soft;

import com.google.common.collect.MapMaker;
import org.apache.lucene.queryParser.QueryParserSettings;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.parser.support.AbstractJvmQueryParserCache;
import org.elasticsearch.index.settings.IndexSettings;

/**
 *
 */
public class SoftQueryParserCache extends AbstractJvmQueryParserCache {

    @Inject
    public SoftQueryParserCache(Index index, @IndexSettings Settings indexSettings) {
        super(index, indexSettings, new MapMaker().softValues().<QueryParserSettings, Query>makeMap());
    }
}

(deleted file, 40 lines)

@@ -1,40 +0,0 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.cache.query.parser.weak;

import com.google.common.collect.MapMaker;
import org.apache.lucene.queryParser.QueryParserSettings;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.parser.support.AbstractJvmQueryParserCache;
import org.elasticsearch.index.settings.IndexSettings;

/**
 *
 */
public class WeakQueryParserCache extends AbstractJvmQueryParserCache {

    @Inject
    public WeakQueryParserCache(Index index, @IndexSettings Settings indexSettings) {
        super(index, indexSettings, new MapMaker().weakValues().<QueryParserSettings, Query>makeMap());
    }
}

@@ -25,7 +25,7 @@ import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.inject.SpawnModules;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
-import org.elasticsearch.indices.cache.filter.IndicesNodeFilterCache;
+import org.elasticsearch.indices.cache.filter.IndicesFilterCache;
 import org.elasticsearch.indices.cluster.IndicesClusterStateService;
 import org.elasticsearch.indices.memory.IndexingMemoryController;
 import org.elasticsearch.indices.query.IndicesQueriesModule;

@@ -63,7 +63,7 @@ public class IndicesModule extends AbstractModule implements SpawnModules {

         bind(IndicesClusterStateService.class).asEagerSingleton();
         bind(IndexingMemoryController.class).asEagerSingleton();
-        bind(IndicesNodeFilterCache.class).asEagerSingleton();
+        bind(IndicesFilterCache.class).asEagerSingleton();
         bind(TransportNodesListShardStoreMetaData.class).asEagerSingleton();
         bind(IndicesTTLService.class).asEagerSingleton();
     }

src/main/java/org/elasticsearch/indices/cache/filter/IndicesFilterCache.java (new file, 154 lines)

@@ -0,0 +1,154 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.cache.filter;

import com.google.common.base.Objects;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.cache.CacheBuilderHelper;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.cache.filter.support.FilterCacheValue;
import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.settings.NodeSettingsService;

import java.util.Map;
import java.util.concurrent.TimeUnit;

public class IndicesFilterCache extends AbstractComponent implements RemovalListener<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> {

    private Cache<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> cache;

    private volatile String size;
    private volatile long sizeInBytes;
    private volatile TimeValue expire;

    private volatile Map<String, RemovalListener<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>>> removalListeners =
            ImmutableMap.of();

    static {
        MetaData.addDynamicSettings(
                "indices.cache.filter.size",
                "indices.cache.filter.expire"
        );
    }

    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            boolean replace = false;
            String size = settings.get("indices.cache.filter.size", IndicesFilterCache.this.size);
            if (!size.equals(IndicesFilterCache.this.size)) {
                logger.info("updating [indices.cache.filter.size] from [{}] to [{}]", IndicesFilterCache.this.size, size);
                IndicesFilterCache.this.size = size;
                replace = true;
            }
            TimeValue expire = settings.getAsTime("indices.cache.filter.expire", IndicesFilterCache.this.expire);
            if (!Objects.equal(expire, IndicesFilterCache.this.expire)) {
                logger.info("updating [indices.cache.filter.expire] from [{}] to [{}]", IndicesFilterCache.this.expire, expire);
                IndicesFilterCache.this.expire = expire;
                replace = true;
            }
            if (replace) {
                Cache<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> oldCache = IndicesFilterCache.this.cache;
                computeSizeInBytes();
                buildCache();
                oldCache.invalidateAll();
            }
        }
    }

    @Inject
    public IndicesFilterCache(Settings settings, NodeSettingsService nodeSettingsService) {
        super(settings);
        this.size = componentSettings.get("size", "20%");
        this.expire = componentSettings.getAsTime("expire", null);
        computeSizeInBytes();
        buildCache();
        logger.debug("using [node] filter cache with size [{}], actual_size [{}]", size, new ByteSizeValue(sizeInBytes));

        nodeSettingsService.addListener(new ApplySettings());
    }

    private void buildCache() {
        CacheBuilder<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> cacheBuilder = CacheBuilder.newBuilder()
                .removalListener(this)
                .maximumWeight(sizeInBytes).weigher(new WeightedFilterCache.FilterCacheValueWeigher());

        // defaults to 4, but this is a busy map for all indices, increase it a bit
        cacheBuilder.concurrencyLevel(8);

        if (expire != null) {
            cacheBuilder.expireAfterAccess(expire.millis(), TimeUnit.MILLISECONDS);
        }

        CacheBuilderHelper.disableStats(cacheBuilder);

        cache = cacheBuilder.build();
    }

    private void computeSizeInBytes() {
        if (size.endsWith("%")) {
            double percent = Double.parseDouble(size.substring(0, size.length() - 1));
            sizeInBytes = (long) ((percent / 100) * JvmInfo.jvmInfo().getMem().getHeapMax().bytes());
        } else {
            sizeInBytes = ByteSizeValue.parseBytesSizeValue(size).bytes();
        }
    }

    public synchronized void addRemovalListener(String index, RemovalListener<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> listener) {
        removalListeners = MapBuilder.newMapBuilder(removalListeners).put(index, listener).immutableMap();
    }

    public synchronized void removeRemovalListener(String index) {
        removalListeners = MapBuilder.newMapBuilder(removalListeners).remove(index).immutableMap();
    }

    public void close() {
        cache.invalidateAll();
    }

    public Cache<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> cache() {
        return this.cache;
    }

    @Override
    public void onRemoval(RemovalNotification<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> removalNotification) {
        WeightedFilterCache.FilterCacheKey key = removalNotification.getKey();
        if (key == null) {
            return;
        }
        RemovalListener<WeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> listener = removalListeners.get(key.index());
        if (listener != null) {
            listener.onRemoval(removalNotification);
        }
    }
}

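The computeSizeInBytes logic above turns the indices.cache.filter.size setting into a weight budget: a value ending in "%" is taken as a fraction of the maximum heap, anything else as an explicit byte size. A standalone sketch of the same calculation, using Runtime.maxMemory() in place of ElasticSearch's internal JvmInfo and skipping the byte-size parser (illustrative only):

public class FilterCacheSizeSketch {

    // "20%" -> 20% of the max heap; values such as "512mb" would go through a byte-size parser instead.
    static long computeSizeInBytes(String size, long maxHeapBytes) {
        if (size.endsWith("%")) {
            double percent = Double.parseDouble(size.substring(0, size.length() - 1));
            return (long) ((percent / 100) * maxHeapBytes);
        }
        // stand-in for ByteSizeValue.parseBytesSizeValue(size).bytes()
        return Long.parseLong(size);
    }

    public static void main(String[] args) {
        long maxHeap = Runtime.getRuntime().maxMemory();
        // with a 1 GB heap, the default of "20%" comes out to roughly 215 MB of cache weight
        System.out.println(computeSizeInBytes("20%", maxHeap) + " bytes for the filter cache");
    }
}

Because the weigher charges each entry its DocSet size in bytes, this single node-level budget now bounds the filter cache across all indices, replacing the old per-node ConcurrentLinkedHashMap below.
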
(deleted file, 123 lines)

@@ -1,123 +0,0 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.cache.filter;

import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
import com.googlecode.concurrentlinkedhashmap.EvictionListener;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.cache.filter.support.AbstractWeightedFilterCache;
import org.elasticsearch.index.cache.filter.support.FilterCacheValue;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;

public class IndicesNodeFilterCache extends AbstractComponent implements EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> {

    private final ThreadPool threadPool;

    private ConcurrentMap<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> cache;

    private volatile String size;
    private volatile long sizeInBytes;

    private final CopyOnWriteArrayList<EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>>> evictionListeners =
            new CopyOnWriteArrayList<EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>>>();

    @Inject
    public IndicesNodeFilterCache(Settings settings, ThreadPool threadPool, NodeSettingsService nodeSettingsService) {
        super(settings);
        this.threadPool = threadPool;
        this.size = componentSettings.get("size", "20%");
        computeSizeInBytes();
        buildCache();
        logger.debug("using [node] filter cache with size [{}], actual_size [{}]", size, new ByteSizeValue(sizeInBytes));

        nodeSettingsService.addListener(new ApplySettings());
    }

    private void buildCache() {
        TimeValue catchupTime = componentSettings.getAsTime("catchup", TimeValue.timeValueSeconds(10));

        int weightedSize = (int) Math.min(sizeInBytes / AbstractWeightedFilterCache.FilterCacheValueWeigher.FACTOR, Integer.MAX_VALUE);

        cache = new ConcurrentLinkedHashMap.Builder<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>>()
                .maximumWeightedCapacity(weightedSize)
                .weigher(new AbstractWeightedFilterCache.FilterCacheValueWeigher())
                .listener(this)
                .catchup(this.threadPool.scheduler(), catchupTime.millis(), TimeUnit.MILLISECONDS)
                .build();
    }

    private void computeSizeInBytes() {
        if (size.endsWith("%")) {
            double percent = Double.parseDouble(size.substring(0, size.length() - 1));
            sizeInBytes = (long) ((percent / 100) * JvmInfo.jvmInfo().getMem().getHeapMax().bytes());
        } else {
            sizeInBytes = ByteSizeValue.parseBytesSizeValue(size).bytes();
        }
    }

    public void addEvictionListener(EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> listener) {
        evictionListeners.add(listener);
    }

    public void removeEvictionListener(EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> listener) {
        evictionListeners.remove(listener);
    }

    public void close() {
        cache.clear();
    }

    public ConcurrentMap<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> cache() {
        return this.cache;
    }

    @Override
    public void onEviction(AbstractWeightedFilterCache.FilterCacheKey filterCacheKey, FilterCacheValue<DocSet> docSetFilterCacheValue) {
        for (EvictionListener<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> listener : evictionListeners) {
            listener.onEviction(filterCacheKey, docSetFilterCacheValue);
        }
    }

    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            String size = settings.get("indices.cache.filter.size", IndicesNodeFilterCache.this.size);
            if (!size.equals(IndicesNodeFilterCache.this.size)) {
                logger.info("updating [indices.cache.filter.size] from [{}] to [{}]", IndicesNodeFilterCache.this.size, size);
                IndicesNodeFilterCache.this.size = size;
                ConcurrentMap<AbstractWeightedFilterCache.FilterCacheKey, FilterCacheValue<DocSet>> oldCache = IndicesNodeFilterCache.this.cache;
                computeSizeInBytes();
                buildCache();
                oldCache.clear();
            }
        }
    }
}

@@ -59,7 +59,7 @@ import org.elasticsearch.http.HttpServer;
 import org.elasticsearch.http.HttpServerModule;
 import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.indices.cache.filter.IndicesNodeFilterCache;
+import org.elasticsearch.indices.cache.filter.IndicesFilterCache;
 import org.elasticsearch.indices.cluster.IndicesClusterStateService;
 import org.elasticsearch.indices.memory.IndexingMemoryController;
 import org.elasticsearch.indices.ttl.IndicesTTLService;

@@ -271,7 +271,7 @@ public final class InternalNode implements Node {
         stopWatch.stop().start("indices_cluster");
         injector.getInstance(IndicesClusterStateService.class).close();
         stopWatch.stop().start("indices");
-        injector.getInstance(IndicesNodeFilterCache.class).close();
+        injector.getInstance(IndicesFilterCache.class).close();
         injector.getInstance(IndexingMemoryController.class).close();
         injector.getInstance(IndicesTTLService.class).close();
         injector.getInstance(IndicesService.class).close();

@@ -19,9 +19,10 @@

 package org.elasticsearch.script;

+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.MapMaker;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.component.AbstractComponent;

@@ -54,7 +55,8 @@ public class ScriptService extends AbstractComponent {

     private final ConcurrentMap<String, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();

-    private final ConcurrentMap<CacheKey, CompiledScript> cache = new MapMaker().softValues().makeMap();
+    // TODO expose some cache aspects like expiration and max size
+    private final Cache<CacheKey, CompiledScript> cache = CacheBuilder.newBuilder().build();

     public ScriptService(Settings settings) {
         this(settings, new Environment(), ImmutableSet.<ScriptEngineService>builder()

@@ -141,7 +143,7 @@ public class ScriptService extends AbstractComponent {
             lang = defaultLang;
         }
         CacheKey cacheKey = new CacheKey(lang, script);
-        compiled = cache.get(cacheKey);
+        compiled = cache.getIfPresent(cacheKey);
         if (compiled != null) {
             return compiled;
         }

@@ -180,7 +182,7 @@ public class ScriptService extends AbstractComponent {
     }

     public void clear() {
-        cache.clear();
+        cache.invalidateAll();
     }

     public static class CacheKey {

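Here the soft-value MapMaker map is swapped for an unbounded Guava cache, and the TODO notes that size and expiration could later be exposed as settings. A hedged sketch of what such a bounded compiled-script cache could look like (not the commit's code; the bounds and the String stand-in for CompiledScript are hypothetical):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import java.util.concurrent.TimeUnit;

public class ScriptCacheSketch {

    public static void main(String[] args) {
        // Hypothetical bounds; the commit itself builds the cache with no limits.
        Cache<String, String> compiledScripts = CacheBuilder.newBuilder()
                .maximumSize(500)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build();

        String key = "mvel#doc_score_times_two";
        String compiled = compiledScripts.getIfPresent(key);
        if (compiled == null) {
            compiled = "COMPILED[" + key + "]"; // stand-in for the script engine's compile step
            compiledScripts.put(key, compiled);
        }
        System.out.println(compiled);
    }
}
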
@@ -31,9 +31,6 @@ import org.elasticsearch.common.lucene.search.TermFilter;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.filter.FilterCache;
 import org.elasticsearch.index.cache.filter.none.NoneFilterCache;
-import org.elasticsearch.index.cache.filter.soft.SoftFilterCache;
-import org.elasticsearch.index.cache.filter.weak.WeakFilterCache;
-import org.elasticsearch.index.settings.IndexSettingsService;
 import org.testng.annotations.Test;

 import java.io.IOException;

@@ -55,16 +52,6 @@ public class FilterCacheTests {
         verifyCache(new NoneFilterCache(new Index("test"), EMPTY_SETTINGS));
     }

-    @Test
-    public void testSoftCache() throws Exception {
-        verifyCache(new SoftFilterCache(new Index("test"), EMPTY_SETTINGS, new IndexSettingsService(new Index("test"), EMPTY_SETTINGS)));
-    }
-
-    @Test
-    public void testWeakCache() throws Exception {
-        verifyCache(new WeakFilterCache(new Index("test"), EMPTY_SETTINGS, new IndexSettingsService(new Index("test"), EMPTY_SETTINGS)));
-    }
-
     private void verifyCache(FilterCache filterCache) throws Exception {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));