cache recycler now node/client level component

Shay Banon 2013-07-13 01:00:45 +02:00
parent 17936fabb0
commit 9f6117612c
62 changed files with 615 additions and 517 deletions

View File

@ -24,13 +24,13 @@ import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.service.IndexService;
@ -51,13 +51,16 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastOperatio
private final IndicesService indicesService;
private final IndicesTermsFilterCache termsFilterCache;
private final CacheRecycler cacheRecycler;
@Inject
public TransportClearIndicesCacheAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, IndicesService indicesService, IndicesTermsFilterCache termsFilterCache) {
TransportService transportService, IndicesService indicesService, IndicesTermsFilterCache termsFilterCache,
CacheRecycler cacheRecycler) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
this.termsFilterCache = termsFilterCache;
this.cacheRecycler = cacheRecycler;
}
@Override
@ -147,7 +150,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastOperatio
if (request.recycler()) {
logger.info("Clear CacheRecycler on index [{}]", service.index());
clearedAtLeastOne = true;
CacheRecycler.clear();
cacheRecycler.clear();
}
if (request.idCache()) {
clearedAtLeastOne = true;
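
The hunks above are the shape of the whole commit: the process-wide static CacheRecycler call becomes a call on a constructor-injected field. A minimal sketch of that pattern, with a hypothetical action class standing in for the real transport actions:

import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.inject.Inject;

// Hypothetical stand-in for the transport actions touched in this commit.
public class SomeCacheAwareAction {

    private final CacheRecycler cacheRecycler; // node/client scoped, not static

    @Inject
    public SomeCacheAwareAction(CacheRecycler cacheRecycler) {
        this.cacheRecycler = cacheRecycler;
    }

    public void clearRecycler() {
        // Before this commit: CacheRecycler.clear() cleared a JVM-wide singleton.
        cacheRecycler.clear(); // Now: clears only this node's (or client's) pools.
    }
}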

View File

@ -25,6 +25,7 @@ import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
@ -61,11 +62,14 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
@Inject
public TransportValidateQueryAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ScriptService scriptService) {
public TransportValidateQueryAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ScriptService scriptService, CacheRecycler cacheRecycler) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
}
@Override
@ -168,7 +172,7 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
SearchContext.setCurrent(new SearchContext(0,
new ShardSearchRequest().types(request.types()),
null, indexShard.searcher(), indexService, indexShard,
scriptService));
scriptService, cacheRecycler));
try {
ParsedQuery parsedQuery = queryParserService.parse(request.querySource());
valid = true;

View File

@ -24,6 +24,7 @@ import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
@ -62,12 +63,15 @@ public class TransportCountAction extends TransportBroadcastOperationAction<Coun
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
@Inject
public TransportCountAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ScriptService scriptService) {
IndicesService indicesService, ScriptService scriptService, CacheRecycler cacheRecycler) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
}
@Override
@ -150,7 +154,7 @@ public class TransportCountAction extends TransportBroadcastOperationAction<Coun
SearchContext context = new SearchContext(0,
new ShardSearchRequest().types(request.types()).filteringAliases(request.filteringAliases()),
shardTarget, indexShard.searcher(), indexService, indexShard,
scriptService);
scriptService, cacheRecycler);
SearchContext.setCurrent(context);
try {

View File

@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
@ -61,13 +62,16 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
@Inject
public TransportExplainAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, IndicesService indicesService,
ScriptService scriptService) {
ScriptService scriptService, CacheRecycler cacheRecycler) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
}
protected String transportAction() {
@ -99,7 +103,7 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
new ShardSearchRequest().types(new String[]{request.type()})
.filteringAliases(request.filteringAlias()),
null, result.searcher(), indexService, indexShard,
scriptService
scriptService, cacheRecycler
);
SearchContext.setCurrent(context);

View File

@ -0,0 +1,100 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cache.recycler;
import gnu.trove.map.hash.*;
import gnu.trove.set.hash.THashSet;
import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap;
import org.elasticsearch.common.trove.ExtTHashMap;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
public interface CacheRecycler {
public abstract void clear();
public abstract <K, V> ExtTHashMap<K, V> popHashMap();
public abstract void pushHashMap(ExtTHashMap map);
public abstract <T> THashSet<T> popHashSet();
public abstract void pushHashSet(THashSet map);
public abstract <T> ExtTDoubleObjectHashMap<T> popDoubleObjectMap();
public abstract void pushDoubleObjectMap(ExtTDoubleObjectHashMap map);
public abstract <T> ExtTLongObjectHashMap<T> popLongObjectMap();
public abstract void pushLongObjectMap(ExtTLongObjectHashMap map);
public abstract TLongLongHashMap popLongLongMap();
public abstract void pushLongLongMap(TLongLongHashMap map);
public abstract TIntIntHashMap popIntIntMap();
public abstract void pushIntIntMap(TIntIntHashMap map);
public abstract TFloatIntHashMap popFloatIntMap();
public abstract void pushFloatIntMap(TFloatIntHashMap map);
public abstract TDoubleIntHashMap popDoubleIntMap();
public abstract void pushDoubleIntMap(TDoubleIntHashMap map);
public abstract TByteIntHashMap popByteIntMap();
public abstract void pushByteIntMap(TByteIntHashMap map);
public abstract TShortIntHashMap popShortIntMap();
public abstract void pushShortIntMap(TShortIntHashMap map);
public abstract TLongIntHashMap popLongIntMap();
public abstract void pushLongIntMap(TLongIntHashMap map);
public abstract <T> TObjectIntHashMap<T> popObjectIntMap();
public abstract <T> void pushObjectIntMap(TObjectIntHashMap<T> map);
public abstract <T> TIntObjectHashMap<T> popIntObjectMap();
public abstract <T> void pushIntObjectMap(TIntObjectHashMap<T> map);
public abstract <T> TObjectFloatHashMap<T> popObjectFloatMap();
public abstract <T> void pushObjectFloatMap(TObjectFloatHashMap<T> map);
public abstract Object[] popObjectArray(int size);
public abstract void pushObjectArray(Object[] objects);
public abstract int[] popIntArray(int size);
public abstract int[] popIntArray(int size, int sentinal);
public abstract void pushIntArray(int[] ints);
public abstract void pushIntArray(int[] ints, int sentinal);
}
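
A usage sketch for the interface above, following the borrow/return discipline the callers in this commit use (pop in a try block, push in finally, as in IndicesFilterCache further down); countDistinct is a hypothetical caller:

import gnu.trove.set.hash.THashSet;
import org.elasticsearch.cache.recycler.CacheRecycler;

public class RecyclerUsageSketch {

    // Borrow a pooled THashSet, use it, and always hand it back so the
    // backing queue can recycle it for the next caller.
    public static int countDistinct(CacheRecycler recycler, String[] values) {
        THashSet<String> seen = recycler.popHashSet();
        try {
            for (String value : values) {
                seen.add(value);
            }
            return seen.size();
        } finally {
            recycler.pushHashSet(seen);
        }
    }
}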

View File

@ -0,0 +1,44 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cache.recycler;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
/**
*/
public class CacheRecyclerModule extends AbstractModule {
private final Settings settings;
public CacheRecyclerModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
String type = settings.get("node.cache.recycler.type", "default");
if ("none".equalsIgnoreCase(type)) {
bind(CacheRecycler.class).to(NoneCacheRecycler.class).asEagerSingleton();
} else {
bind(CacheRecycler.class).to(DefaultCacheRecycler.class).asEagerSingleton();
}
}
}
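
The module replaces the old es.cache.recycle system property (read by the deleted static facade later in this commit) with the node.cache.recycler.type setting. A wiring sketch, assuming the 0.90-era ImmutableSettings/ModulesBuilder helpers used elsewhere in this commit:

import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class RecyclerWiringSketch {

    public static void main(String[] args) {
        // "none" binds NoneCacheRecycler; any other value (including the
        // "default" fallback) binds DefaultCacheRecycler.
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("node.cache.recycler.type", "none")
                .build();

        Injector injector = new ModulesBuilder()
                .add(new CacheRecyclerModule(settings))
                .createInjector();

        CacheRecycler recycler = injector.getInstance(CacheRecycler.class);
        recycler.clear(); // a no-op for the pool-less "none" implementation
    }
}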

View File

@ -17,32 +17,21 @@
* under the License.
*/
package org.elasticsearch.common;
package org.elasticsearch.cache.recycler;
import gnu.trove.map.hash.TByteIntHashMap;
import gnu.trove.map.hash.TDoubleIntHashMap;
import gnu.trove.map.hash.TFloatIntHashMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TLongIntHashMap;
import gnu.trove.map.hash.TLongLongHashMap;
import gnu.trove.map.hash.TObjectFloatHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.map.hash.TShortIntHashMap;
import gnu.trove.map.hash.*;
import gnu.trove.set.hash.THashSet;
import java.lang.ref.SoftReference;
import java.util.Arrays;
import java.util.Queue;
import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap;
import org.elasticsearch.common.trove.ExtTHashMap;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
public class DefaultCacheRecycler implements Recycler {
import java.lang.ref.SoftReference;
import java.util.Arrays;
import java.util.Queue;
public class DefaultCacheRecycler implements CacheRecycler {
@Override
public void clear() {
hashMap.clear();
@ -86,7 +75,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<ExtTHashMap>> hashMap = new SoftWrapper<Queue<ExtTHashMap>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popHashMap()
* @see org.elasticsearch.common.CacheRecycler#popHashMap()
*/
@Override
@SuppressWarnings("unchecked")
@ -99,7 +88,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushHashMap(org.elasticsearch.common.trove.ExtTHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushHashMap(org.elasticsearch.common.trove.ExtTHashMap)
*/
@Override
public void pushHashMap(ExtTHashMap map) {
@ -112,7 +101,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<THashSet>> hashSet = new SoftWrapper<Queue<THashSet>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popHashSet()
* @see org.elasticsearch.common.CacheRecycler#popHashSet()
*/
@Override
@SuppressWarnings("unchecked")
@ -125,7 +114,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushHashSet(gnu.trove.set.hash.THashSet)
* @see org.elasticsearch.common.CacheRecycler#pushHashSet(gnu.trove.set.hash.THashSet)
*/
@Override
public void pushHashSet(THashSet map) {
@ -138,7 +127,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<ExtTDoubleObjectHashMap>> doubleObjectHashMap = new SoftWrapper<Queue<ExtTDoubleObjectHashMap>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popDoubleObjectMap()
* @see org.elasticsearch.common.CacheRecycler#popDoubleObjectMap()
*/
@Override
@SuppressWarnings("unchecked")
@ -151,7 +140,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushDoubleObjectMap(org.elasticsearch.common.trove.ExtTDoubleObjectHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushDoubleObjectMap(org.elasticsearch.common.trove.ExtTDoubleObjectHashMap)
*/
@Override
public void pushDoubleObjectMap(ExtTDoubleObjectHashMap map) {
@ -164,7 +153,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<ExtTLongObjectHashMap>> longObjectHashMap = new SoftWrapper<Queue<ExtTLongObjectHashMap>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popLongObjectMap()
* @see org.elasticsearch.common.CacheRecycler#popLongObjectMap()
*/
@Override
@SuppressWarnings("unchecked")
@ -177,7 +166,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushLongObjectMap(org.elasticsearch.common.trove.ExtTLongObjectHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushLongObjectMap(org.elasticsearch.common.trove.ExtTLongObjectHashMap)
*/
@Override
public void pushLongObjectMap(ExtTLongObjectHashMap map) {
@ -190,7 +179,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<TLongLongHashMap>> longLongHashMap = new SoftWrapper<Queue<TLongLongHashMap>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popLongLongMap()
* @see org.elasticsearch.common.CacheRecycler#popLongLongMap()
*/
@Override
public TLongLongHashMap popLongLongMap() {
@ -202,7 +191,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushLongLongMap(gnu.trove.map.hash.TLongLongHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushLongLongMap(gnu.trove.map.hash.TLongLongHashMap)
*/
@Override
public void pushLongLongMap(TLongLongHashMap map) {
@ -215,7 +204,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popIntIntMap()
* @see org.elasticsearch.common.CacheRecycler#popIntIntMap()
*/
@Override
public TIntIntHashMap popIntIntMap() {
@ -227,7 +216,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushIntIntMap(gnu.trove.map.hash.TIntIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushIntIntMap(gnu.trove.map.hash.TIntIntHashMap)
*/
@Override
public void pushIntIntMap(TIntIntHashMap map) {
@ -242,7 +231,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popFloatIntMap()
* @see org.elasticsearch.common.CacheRecycler#popFloatIntMap()
*/
@Override
public TFloatIntHashMap popFloatIntMap() {
@ -254,7 +243,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushFloatIntMap(gnu.trove.map.hash.TFloatIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushFloatIntMap(gnu.trove.map.hash.TFloatIntHashMap)
*/
@Override
public void pushFloatIntMap(TFloatIntHashMap map) {
@ -269,7 +258,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popDoubleIntMap()
* @see org.elasticsearch.common.CacheRecycler#popDoubleIntMap()
*/
@Override
public TDoubleIntHashMap popDoubleIntMap() {
@ -281,7 +270,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushDoubleIntMap(gnu.trove.map.hash.TDoubleIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushDoubleIntMap(gnu.trove.map.hash.TDoubleIntHashMap)
*/
@Override
public void pushDoubleIntMap(TDoubleIntHashMap map) {
@ -296,7 +285,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popByteIntMap()
* @see org.elasticsearch.common.CacheRecycler#popByteIntMap()
*/
@Override
public TByteIntHashMap popByteIntMap() {
@ -308,7 +297,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushByteIntMap(gnu.trove.map.hash.TByteIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushByteIntMap(gnu.trove.map.hash.TByteIntHashMap)
*/
@Override
public void pushByteIntMap(TByteIntHashMap map) {
@ -323,7 +312,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popShortIntMap()
* @see org.elasticsearch.common.CacheRecycler#popShortIntMap()
*/
@Override
public TShortIntHashMap popShortIntMap() {
@ -335,7 +324,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushShortIntMap(gnu.trove.map.hash.TShortIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushShortIntMap(gnu.trove.map.hash.TShortIntHashMap)
*/
@Override
public void pushShortIntMap(TShortIntHashMap map) {
@ -350,7 +339,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popLongIntMap()
* @see org.elasticsearch.common.CacheRecycler#popLongIntMap()
*/
@Override
public TLongIntHashMap popLongIntMap() {
@ -366,7 +355,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushLongIntMap(gnu.trove.map.hash.TLongIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushLongIntMap(gnu.trove.map.hash.TLongIntHashMap)
*/
@Override
public void pushLongIntMap(TLongIntHashMap map) {
@ -380,7 +369,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popObjectIntMap()
* @see org.elasticsearch.common.CacheRecycler#popObjectIntMap()
*/
@Override
@SuppressWarnings({"unchecked"})
@ -393,7 +382,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushObjectIntMap(gnu.trove.map.hash.TObjectIntHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushObjectIntMap(gnu.trove.map.hash.TObjectIntHashMap)
*/
@Override
public <T> void pushObjectIntMap(TObjectIntHashMap<T> map) {
@ -407,7 +396,7 @@ public class DefaultCacheRecycler implements Recycler {
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popIntObjectMap()
* @see org.elasticsearch.common.CacheRecycler#popIntObjectMap()
*/
@Override
@SuppressWarnings({"unchecked"})
@ -420,7 +409,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushIntObjectMap(gnu.trove.map.hash.TIntObjectHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushIntObjectMap(gnu.trove.map.hash.TIntObjectHashMap)
*/
@Override
public <T> void pushIntObjectMap(TIntObjectHashMap<T> map) {
@ -433,7 +422,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<TObjectFloatHashMap>> objectFloatHashMap = new SoftWrapper<Queue<TObjectFloatHashMap>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popObjectFloatMap()
* @see org.elasticsearch.common.CacheRecycler#popObjectFloatMap()
*/
@Override
@SuppressWarnings({"unchecked"})
@ -446,7 +435,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushObjectFloatMap(gnu.trove.map.hash.TObjectFloatHashMap)
* @see org.elasticsearch.common.CacheRecycler#pushObjectFloatMap(gnu.trove.map.hash.TObjectFloatHashMap)
*/
@Override
public <T> void pushObjectFloatMap(TObjectFloatHashMap<T> map) {
@ -459,7 +448,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<Object[]>> objectArray = new SoftWrapper<Queue<Object[]>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popObjectArray(int)
* @see org.elasticsearch.common.CacheRecycler#popObjectArray(int)
*/
@Override
public Object[] popObjectArray(int size) {
@ -479,7 +468,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#pushObjectArray(java.lang.Object[])
* @see org.elasticsearch.common.CacheRecycler#pushObjectArray(java.lang.Object[])
*/
@Override
public void pushObjectArray(Object[] objects) {
@ -491,7 +480,7 @@ public class DefaultCacheRecycler implements Recycler {
private final SoftWrapper<Queue<int[]>> intArray = new SoftWrapper<Queue<int[]>>();
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popIntArray(int)
* @see org.elasticsearch.common.CacheRecycler#popIntArray(int)
*/
@Override
public int[] popIntArray(int size) {
@ -499,7 +488,7 @@ public class DefaultCacheRecycler implements Recycler {
}
/* (non-Javadoc)
* @see org.elasticsearch.common.Recycler#popIntArray(int, int)
* @see org.elasticsearch.common.CacheRecycler#popIntArray(int, int)
*/
@Override
public int[] popIntArray(int size, int sentinal) {
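
DefaultCacheRecycler above keeps each pool in a SoftWrapper<Queue<...>>, i.e. a queue held behind a soft reference so the GC can drop an idle pool under memory pressure. An illustrative sketch of that idea (not the actual SoftWrapper code), assuming a plain ConcurrentLinkedQueue as the backing queue:

import java.lang.ref.SoftReference;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

// Sketch of a softly-referenced object pool: pop() may return null (pool was
// collected or empty), in which case the caller allocates a fresh instance.
public class SoftPoolSketch<T> {

    private volatile SoftReference<Queue<T>> ref = new SoftReference<Queue<T>>(null);

    public T pop() {
        Queue<T> pool = ref.get();
        return pool == null ? null : pool.poll();
    }

    public void push(T recycled) {
        Queue<T> pool = ref.get();
        if (pool == null) {
            // Pool was reclaimed by the GC (or never existed); start a new one.
            pool = new ConcurrentLinkedQueue<T>();
            ref = new SoftReference<Queue<T>>(pool);
        }
        pool.offer(recycled);
    }
}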

View File

@ -17,27 +17,17 @@
* under the License.
*/
package org.elasticsearch.common;
package org.elasticsearch.cache.recycler;
import gnu.trove.map.hash.TByteIntHashMap;
import gnu.trove.map.hash.TDoubleIntHashMap;
import gnu.trove.map.hash.TFloatIntHashMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TLongIntHashMap;
import gnu.trove.map.hash.TLongLongHashMap;
import gnu.trove.map.hash.TObjectFloatHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.map.hash.TShortIntHashMap;
import gnu.trove.map.hash.*;
import gnu.trove.set.hash.THashSet;
import java.util.Arrays;
import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap;
import org.elasticsearch.common.trove.ExtTHashMap;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
public class NoCacheCacheRecycler implements Recycler {
import java.util.Arrays;
public class NoneCacheRecycler implements CacheRecycler {
@Override
public void clear() {
@ -151,7 +141,7 @@ public class NoCacheCacheRecycler implements Recycler {
}
@Override
@SuppressWarnings({ "unchecked" })
@SuppressWarnings({"unchecked"})
public <T> TObjectIntHashMap<T> popObjectIntMap() {
return new TObjectIntHashMap();
}
@ -161,7 +151,7 @@ public class NoCacheCacheRecycler implements Recycler {
}
@Override
@SuppressWarnings({ "unchecked" })
@SuppressWarnings({"unchecked"})
public <T> TIntObjectHashMap<T> popIntObjectMap() {
return new TIntObjectHashMap<T>();
}
@ -171,7 +161,7 @@ public class NoCacheCacheRecycler implements Recycler {
}
@Override
@SuppressWarnings({ "unchecked" })
@SuppressWarnings({"unchecked"})
public <T> TObjectFloatHashMap<T> popObjectFloatMap() {
return new TObjectFloatHashMap();
}

View File

@ -39,21 +39,22 @@ import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.mlt.MoreLikeThisRequest;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.percolate.PercolateRequest;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.client.transport.support.InternalTransportClient;
import org.elasticsearch.cluster.ClusterNameModule;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.compress.CompressorFactory;
@ -165,6 +166,7 @@ public class TransportClient extends AbstractClient {
CompressorFactory.configure(this.settings);
ModulesBuilder modules = new ModulesBuilder();
modules.add(new CacheRecyclerModule(settings));
modules.add(new PluginsModule(this.settings, pluginsService));
modules.add(new EnvironmentModule(environment));
modules.add(new SettingsModule(this.settings));
@ -272,7 +274,8 @@ public class TransportClient extends AbstractClient {
// ignore
}
CacheRecycler.clear();
injector.getInstance(CacheRecycler.class).clear();
CachedStreams.clear();
ThreadLocals.clearReferencesThreadLocals();
}
@ -431,7 +434,7 @@ public class TransportClient extends AbstractClient {
public void moreLikeThis(MoreLikeThisRequest request, ActionListener<SearchResponse> listener) {
internalClient.moreLikeThis(request, listener);
}
@Override
public void termVector(TermVectorRequest request, ActionListener<TermVectorResponse> listener) {
internalClient.termVector(request, listener);

View File

@ -1,185 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import gnu.trove.map.hash.*;
import gnu.trove.set.hash.THashSet;
import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap;
import org.elasticsearch.common.trove.ExtTHashMap;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
public final class CacheRecycler {
private static final Recycler INSTANCE;
static {
String property = System.getProperty("es.cache.recycle");
if (property != null && !Boolean.parseBoolean(property)) {
INSTANCE = new NoCacheCacheRecycler();
} else {
INSTANCE = new DefaultCacheRecycler();
}
}
private CacheRecycler() {
// no instance
}
public static void clear() {
INSTANCE.clear();
}
public static <K, V> ExtTHashMap<K, V> popHashMap() {
return INSTANCE.popHashMap();
}
public static void pushHashMap(ExtTHashMap map) {
INSTANCE.pushHashMap(map);
}
public static <T> THashSet<T> popHashSet() {
return INSTANCE.popHashSet();
}
public static void pushHashSet(THashSet map) {
INSTANCE.pushHashSet(map);
}
public static <T> ExtTDoubleObjectHashMap<T> popDoubleObjectMap() {
return INSTANCE.popDoubleObjectMap();
}
public static void pushDoubleObjectMap(ExtTDoubleObjectHashMap map) {
INSTANCE.pushDoubleObjectMap(map);
}
public static <T> ExtTLongObjectHashMap<T> popLongObjectMap() {
return INSTANCE.popLongObjectMap();
}
public static void pushLongObjectMap(ExtTLongObjectHashMap map) {
INSTANCE.pushLongObjectMap(map);
}
public static TLongLongHashMap popLongLongMap() {
return INSTANCE.popLongLongMap();
}
public static void pushLongLongMap(TLongLongHashMap map) {
INSTANCE.pushLongLongMap(map);
}
public static TIntIntHashMap popIntIntMap() {
return INSTANCE.popIntIntMap();
}
public static void pushIntIntMap(TIntIntHashMap map) {
INSTANCE.pushIntIntMap(map);
}
public static TFloatIntHashMap popFloatIntMap() {
return INSTANCE.popFloatIntMap();
}
public static void pushFloatIntMap(TFloatIntHashMap map) {
INSTANCE.pushFloatIntMap(map);
}
public static TDoubleIntHashMap popDoubleIntMap() {
return INSTANCE.popDoubleIntMap();
}
public static void pushDoubleIntMap(TDoubleIntHashMap map) {
INSTANCE.pushDoubleIntMap(map);
}
public static TByteIntHashMap popByteIntMap() {
return INSTANCE.popByteIntMap();
}
public static void pushByteIntMap(TByteIntHashMap map) {
INSTANCE.pushByteIntMap(map);
}
public static TShortIntHashMap popShortIntMap() {
return INSTANCE.popShortIntMap();
}
public static void pushShortIntMap(TShortIntHashMap map) {
INSTANCE.pushShortIntMap(map);
}
public static TLongIntHashMap popLongIntMap() {
return INSTANCE.popLongIntMap();
}
public static void pushLongIntMap(TLongIntHashMap map) {
INSTANCE.pushLongIntMap(map);
}
public static <T> TObjectIntHashMap<T> popObjectIntMap() {
return INSTANCE.popObjectIntMap();
}
public static <T> void pushObjectIntMap(TObjectIntHashMap<T> map) {
INSTANCE.pushObjectIntMap(map);
}
public static <T> TIntObjectHashMap<T> popIntObjectMap() {
return INSTANCE.popIntObjectMap();
}
public static <T> void pushIntObjectMap(TIntObjectHashMap<T> map) {
INSTANCE.pushIntObjectMap(map);
}
public static <T> TObjectFloatHashMap<T> popObjectFloatMap() {
return INSTANCE.popObjectFloatMap();
}
public static <T> void pushObjectFloatMap(TObjectFloatHashMap<T> map) {
INSTANCE.pushObjectFloatMap(map);
}
public static Object[] popObjectArray(int size) {
return INSTANCE.popObjectArray(size);
}
public static void pushObjectArray(Object[] objects) {
INSTANCE.pushObjectArray(objects);
}
public static int[] popIntArray(int size) {
return INSTANCE.popIntArray(size);
}
public static int[] popIntArray(int size, int sentinal) {
return INSTANCE.popIntArray(size, sentinal);
}
public static void pushIntArray(int[] ints) {
INSTANCE.pushIntArray(ints);
}
public static void pushIntArray(int[] ints, int sentinal) {
INSTANCE.pushIntArray(ints, sentinal);
}
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
@ -69,6 +70,8 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
};
final CacheRecycler cacheRecycler;
final AnalysisService analysisService;
final ScriptService scriptService;
@ -92,13 +95,14 @@ public class IndexQueryParserService extends AbstractIndexComponent {
@Inject
public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings,
IndicesQueriesRegistry indicesQueriesRegistry,
IndicesQueriesRegistry indicesQueriesRegistry, CacheRecycler cacheRecycler,
ScriptService scriptService, AnalysisService analysisService,
MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService, IndexEngine indexEngine,
@Nullable SimilarityService similarityService,
@Nullable Map<String, QueryParserFactory> namedQueryParsers,
@Nullable Map<String, FilterParserFactory> namedFilterParsers) {
super(index, indexSettings);
this.cacheRecycler = cacheRecycler;
this.scriptService = scriptService;
this.analysisService = analysisService;
this.mapperService = mapperService;

View File

@ -26,6 +26,7 @@ import org.apache.lucene.queryparser.classic.QueryParserSettings;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
@ -106,6 +107,10 @@ public class QueryParseContext {
return indexQueryParser.analysisService;
}
public CacheRecycler cacheRecycler() {
return indexQueryParser.cacheRecycler;
}
public ScriptService scriptService() {
return indexQueryParser.scriptService;
}

View File

@ -127,7 +127,7 @@ public class TopChildrenQueryParser implements QueryParser {
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[top_children] Can't execute, search context not set.");
}
TopChildrenQuery childQuery = new TopChildrenQuery(query, childType, parentType, scoreType, factor, incrementalFactor);
TopChildrenQuery childQuery = new TopChildrenQuery(query, childType, parentType, scoreType, factor, incrementalFactor, parseContext.cacheRecycler());
searchContext.addRewrite(childQuery);
return childQuery;
}

View File

@ -30,7 +30,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@ -123,11 +123,11 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
uidToScore = CacheRecycler.popObjectFloatMap();
uidToScore = searchContext.cacheRecycler().popObjectFloatMap();
Collector collector;
switch (scoreType) {
case AVG:
uidToCount = CacheRecycler.popObjectIntMap();
uidToCount = searchContext.cacheRecycler().popObjectIntMap();
collector = new AvgChildUidCollector(scoreType, searchContext, parentType, uidToScore, uidToCount);
break;
default:
@ -145,11 +145,11 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
@Override
public void contextClear() {
if (uidToScore != null) {
CacheRecycler.pushObjectFloatMap(uidToScore);
searchContext.cacheRecycler().pushObjectFloatMap(uidToScore);
}
uidToScore = null;
if (uidToCount != null) {
CacheRecycler.pushObjectIntMap(uidToCount);
searchContext.cacheRecycler().pushObjectIntMap(uidToCount);
}
uidToCount = null;
}

View File

@ -27,7 +27,6 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
@ -112,7 +111,7 @@ public class HasChildFilter extends Filter implements SearchContext.Rewrite {
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
collectedUids = CacheRecycler.popHashSet();
collectedUids = searchContext.cacheRecycler().popHashSet();
UidCollector collector = new UidCollector(parentType, searchContext, collectedUids);
searchContext.searcher().search(childQuery, collector);
}
@ -120,7 +119,7 @@ public class HasChildFilter extends Filter implements SearchContext.Rewrite {
@Override
public void contextClear() {
if (collectedUids != null) {
CacheRecycler.pushHashSet(collectedUids);
searchContext.cacheRecycler().pushHashSet(collectedUids);
}
collectedUids = null;
}
@ -154,7 +153,7 @@ public class HasChildFilter extends Filter implements SearchContext.Rewrite {
}
@Override
public void collect(int doc, HashedBytesArray parentIdByDoc){
public void collect(int doc, HashedBytesArray parentIdByDoc) {
collectedUids.add(parentIdByDoc);
}

View File

@ -27,7 +27,6 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
@ -97,7 +96,7 @@ public class HasParentFilter extends Filter implements SearchContext.Rewrite {
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
parents = CacheRecycler.popHashSet();
parents = context.cacheRecycler().popHashSet();
ParentUidsCollector collector = new ParentUidsCollector(parents, context, parentType);
searchContext.searcher().search(parentQuery, collector);
parents = collector.collectedUids;
@ -106,7 +105,7 @@ public class HasParentFilter extends Filter implements SearchContext.Rewrite {
@Override
public void contextClear() {
if (parents != null) {
CacheRecycler.pushHashSet(parents);
context.cacheRecycler().pushHashSet(parents);
}
parents = null;
}

View File

@ -27,7 +27,6 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
@ -64,7 +63,7 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
uidToScore = CacheRecycler.popObjectFloatMap();
uidToScore = searchContext.cacheRecycler().popObjectFloatMap();
ParentUidCollector collector = new ParentUidCollector(uidToScore, searchContext, parentType);
Query parentQuery;
if (rewrittenParentQuery == null) {
@ -78,7 +77,7 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
@Override
public void contextClear() {
if (uidToScore != null) {
CacheRecycler.pushObjectFloatMap(uidToScore);
searchContext.cacheRecycler().pushObjectFloatMap(uidToScore);
}
uidToScore = null;
}

View File

@ -25,7 +25,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.EmptyScorer;
import org.elasticsearch.common.trove.ExtTHashMap;
@ -53,6 +53,7 @@ import java.util.Set;
*/
public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
private final CacheRecycler cacheRecycler;
private final String parentType;
private final String childType;
private final ScoreType scoreType;
@ -65,13 +66,14 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
private ExtTHashMap<Object, ParentDoc[]> parentDocs;
// Note, the query is expected to already be filtered to only child type docs
public TopChildrenQuery(Query childQuery, String childType, String parentType, ScoreType scoreType, int factor, int incrementalFactor) {
public TopChildrenQuery(Query childQuery, String childType, String parentType, ScoreType scoreType, int factor, int incrementalFactor, CacheRecycler cacheRecycler) {
this.originalChildQuery = childQuery;
this.childType = childType;
this.parentType = parentType;
this.scoreType = scoreType;
this.factor = factor;
this.incrementalFactor = incrementalFactor;
this.cacheRecycler = cacheRecycler;
}
// Rewrite invocation logic:
@ -92,7 +94,7 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
this.parentDocs = CacheRecycler.popHashMap();
this.parentDocs = cacheRecycler.popHashMap();
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
int parentHitsResolved;
@ -132,14 +134,14 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
@Override
public void contextClear() {
if (parentDocs != null) {
CacheRecycler.pushHashMap(parentDocs);
cacheRecycler.pushHashMap(parentDocs);
parentDocs = null;
}
}
int resolveParentDocuments(TopDocs topDocs, SearchContext context) {
int parentHitsResolved = 0;
ExtTHashMap<Object, TIntObjectHashMap<ParentDoc>> parentDocsPerReader = CacheRecycler.popHashMap();
ExtTHashMap<Object, TIntObjectHashMap<ParentDoc>> parentDocsPerReader = cacheRecycler.popHashMap();
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
int readerIndex = ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves());
AtomicReaderContext subContext = context.searcher().getIndexReader().leaves().get(readerIndex);
@ -161,7 +163,7 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
TIntObjectHashMap<ParentDoc> readerParentDocs = parentDocsPerReader.get(indexReader.getCoreCacheKey());
if (readerParentDocs == null) {
readerParentDocs = CacheRecycler.popIntObjectMap();
readerParentDocs = cacheRecycler.popIntObjectMap();
parentDocsPerReader.put(indexReader.getCoreCacheKey(), readerParentDocs);
}
@ -189,9 +191,9 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
ParentDoc[] values = entry.getValue().values(new ParentDoc[entry.getValue().size()]);
Arrays.sort(values, PARENT_DOC_COMP);
parentDocs.put(entry.getKey(), values);
CacheRecycler.pushIntObjectMap(entry.getValue());
cacheRecycler.pushIntObjectMap(entry.getValue());
}
CacheRecycler.pushHashMap(parentDocsPerReader);
cacheRecycler.pushHashMap(parentDocsPerReader);
return parentHitsResolved;
}
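
The parent/child queries above all follow the same SearchContext.Rewrite lifecycle: pooled maps are borrowed in contextRewrite(...) and returned in contextClear(). A reduced sketch of that contract with the now-injected recycler (the class and method bodies are illustrative, not the real query classes):

import gnu.trove.map.hash.TObjectFloatHashMap;
import org.elasticsearch.cache.recycler.CacheRecycler;

public class RewriteLifecycleSketch {

    private final CacheRecycler cacheRecycler;
    private TObjectFloatHashMap<String> uidToScore;

    public RewriteLifecycleSketch(CacheRecycler cacheRecycler) {
        this.cacheRecycler = cacheRecycler;
    }

    public void contextRewrite() {
        uidToScore = cacheRecycler.popObjectFloatMap(); // borrowed for one search
        // ... populate the map while collecting matching docs ...
    }

    public void contextClear() {
        if (uidToScore != null) {
            cacheRecycler.pushObjectFloatMap(uidToScore); // return to the pool
        }
        uidToScore = null; // never touch a map after pushing it back
    }
}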

View File

@ -26,7 +26,7 @@ import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import gnu.trove.set.hash.THashSet;
import org.apache.lucene.search.DocIdSet;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -45,6 +45,7 @@ import java.util.concurrent.TimeUnit;
public class IndicesFilterCache extends AbstractComponent implements RemovalListener<WeightedFilterCache.FilterCacheKey, DocIdSet> {
private final ThreadPool threadPool;
private final CacheRecycler cacheRecycler;
private Cache<WeightedFilterCache.FilterCacheKey, DocIdSet> cache;
@ -88,9 +89,10 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
}
@Inject
public IndicesFilterCache(Settings settings, ThreadPool threadPool, NodeSettingsService nodeSettingsService) {
public IndicesFilterCache(Settings settings, ThreadPool threadPool, CacheRecycler cacheRecycler, NodeSettingsService nodeSettingsService) {
super(settings);
this.threadPool = threadPool;
this.cacheRecycler = cacheRecycler;
this.size = componentSettings.get("size", "20%");
this.expire = componentSettings.getAsTime("expire", null);
this.cleanInterval = componentSettings.getAsTime("clean_interval", TimeValue.timeValueSeconds(60));
@ -172,7 +174,7 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() {
@Override
public void run() {
THashSet<Object> keys = CacheRecycler.popHashSet();
THashSet<Object> keys = cacheRecycler.popHashSet();
try {
for (Iterator<Object> it = readersKeysToClean.iterator(); it.hasNext(); ) {
keys.add(it.next());
@ -190,7 +192,7 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
}
threadPool.schedule(cleanInterval, ThreadPool.Names.SAME, ReaderCleaner.this);
} finally {
CacheRecycler.pushHashSet(keys);
cacheRecycler.pushHashSet(keys);
}
}
});

View File

@ -26,6 +26,8 @@ import org.elasticsearch.bulk.udp.BulkUdpModule;
import org.elasticsearch.bulk.udp.BulkUdpService;
import org.elasticsearch.cache.NodeCache;
import org.elasticsearch.cache.NodeCacheModule;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClientModule;
import org.elasticsearch.cluster.ClusterModule;
@ -33,7 +35,6 @@ import org.elasticsearch.cluster.ClusterNameModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.Lifecycle;
@ -134,6 +135,7 @@ public final class InternalNode implements Node {
NodeEnvironment nodeEnvironment = new NodeEnvironment(this.settings, this.environment);
ModulesBuilder modules = new ModulesBuilder();
modules.add(new CacheRecyclerModule(settings));
modules.add(new PluginsModule(settings, pluginsService));
modules.add(new SettingsModule(settings));
modules.add(new NodeModule(this));
@ -344,9 +346,9 @@ public final class InternalNode implements Node {
}
injector.getInstance(NodeEnvironment.class).close();
injector.getInstance(CacheRecycler.class).clear();
Injectors.close(injector);
CacheRecycler.clear();
CachedStreams.clear();
ThreadLocals.clearReferencesThreadLocals();

View File

@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
@ -80,6 +81,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final DfsPhase dfsPhase;
private final QueryPhase queryPhase;
@ -102,13 +105,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
@Inject
public SearchService(Settings settings, ClusterService clusterService, IndicesService indicesService, IndicesLifecycle indicesLifecycle, IndicesWarmer indicesWarmer, ThreadPool threadPool,
ScriptService scriptService, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase) {
ScriptService scriptService, CacheRecycler cacheRecycler, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase) {
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.indicesService = indicesService;
this.indicesWarmer = indicesWarmer;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.dfsPhase = dfsPhase;
this.queryPhase = queryPhase;
this.fetchPhase = fetchPhase;
@ -476,7 +480,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
Engine.Searcher engineSearcher = searcher == null ? indexShard.searcher() : searcher;
SearchContext context = new SearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService);
SearchContext context = new SearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, cacheRecycler);
SearchContext.setCurrent(context);
try {
context.scroll(request.scroll());

View File

@ -25,6 +25,7 @@ import com.google.common.collect.Ordering;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.XMaps;
import org.elasticsearch.common.component.AbstractComponent;
@ -71,11 +72,13 @@ public class SearchPhaseController extends AbstractComponent {
private static final ShardDoc[] EMPTY = new ShardDoc[0];
private final CacheRecycler cacheRecycler;
private final boolean optimizeSingleShard;
@Inject
public SearchPhaseController(Settings settings) {
public SearchPhaseController(Settings settings, CacheRecycler cacheRecycler) {
super(settings);
this.cacheRecycler = cacheRecycler;
this.optimizeSingleShard = componentSettings.getAsBoolean("optimize_single_shard", true);
}
@ -327,7 +330,7 @@ public class SearchPhaseController extends AbstractComponent {
}
}
if (!namedFacets.isEmpty()) {
Facet aggregatedFacet = ((InternalFacet) namedFacets.get(0)).reduce(namedFacets);
Facet aggregatedFacet = ((InternalFacet) namedFacets.get(0)).reduce(new InternalFacet.ReduceContext(cacheRecycler, namedFacets));
aggregatedFacets.add(aggregatedFacet);
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.facet;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
@ -35,6 +36,24 @@ import java.util.List;
*/
public abstract class InternalFacet implements Facet, Streamable, ToXContent {
public static class ReduceContext {
private final CacheRecycler cacheRecycler;
private final List<Facet> facets;
public ReduceContext(CacheRecycler cacheRecycler, List<Facet> facets) {
this.cacheRecycler = cacheRecycler;
this.facets = facets;
}
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
public List<Facet> facets() {
return facets;
}
}
private String facetName;
/**
@ -50,7 +69,7 @@ public abstract class InternalFacet implements Facet, Streamable, ToXContent {
public abstract BytesReference streamType();
public abstract Facet reduce(List<Facet> facets);
public abstract Facet reduce(ReduceContext context);
public static interface Stream {
Facet readFacet(StreamInput in) throws IOException;
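
reduce(List<Facet>) becomes reduce(ReduceContext) so reducers can reach the node-level recycler. A standalone sketch of the merge step that InternalCountDateHistogramFacet performs below, with the recycler passed in directly and the long[][] result type purely illustrative:

import gnu.trove.iterator.TLongLongIterator;
import gnu.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.cache.recycler.CacheRecycler;

import java.util.List;

public class ReduceSketch {

    // Merge per-shard (timestamp -> count) maps through one pooled scratch map,
    // then materialize plain entries and hand the scratch map back.
    public static long[][] mergeCounts(CacheRecycler cacheRecycler,
                                       List<TLongLongHashMap> shardCounts) {
        TLongLongHashMap merged = cacheRecycler.popLongLongMap();
        try {
            for (TLongLongHashMap counts : shardCounts) {
                for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
                    it.advance();
                    merged.adjustOrPutValue(it.key(), it.value(), it.value());
                }
            }
            long[][] entries = new long[merged.size()][2];
            int i = 0;
            for (TLongLongIterator it = merged.iterator(); it.hasNext(); ) {
                it.advance();
                entries[i][0] = it.key();
                entries[i][1] = it.value();
                i++;
            }
            return entries;
        } finally {
            cacheRecycler.pushLongLongMap(merged);
        }
    }
}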

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.datehistogram;
import gnu.trove.map.hash.TLongLongHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
@ -37,18 +37,20 @@ import java.io.IOException;
*/
public class CountDateHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final TimeZoneRounding tzRounding;
private final IndexNumericFieldData indexFieldData;
final DateHistogramFacet.ComparatorType comparatorType;
final TLongLongHashMap counts;
public CountDateHistogramFacetExecutor(IndexNumericFieldData indexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType) {
public CountDateHistogramFacetExecutor(IndexNumericFieldData indexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.tzRounding = tzRounding;
this.cacheRecycler = cacheRecycler;
this.counts = CacheRecycler.popLongLongMap();
this.counts = cacheRecycler.popLongLongMap();
}
@Override
@ -58,7 +60,7 @@ public class CountDateHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalCountDateHistogramFacet(facetName, comparatorType, counts, true);
return new InternalCountDateHistogramFacet(facetName, comparatorType, counts, cacheRecycler);
}
class Collector extends FacetExecutor.Collector {

View File

@ -179,16 +179,16 @@ public class DateHistogramFacetParser extends AbstractComponent implements Facet
if (valueScript != null) {
SearchScript script = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
return new ValueScriptDateHistogramFacetExecutor(keyIndexFieldData, script, tzRounding, comparatorType);
return new ValueScriptDateHistogramFacetExecutor(keyIndexFieldData, script, tzRounding, comparatorType, context.cacheRecycler());
} else if (valueField != null) {
FieldMapper valueMapper = context.smartNameFieldMapper(valueField);
if (valueMapper == null) {
throw new FacetPhaseExecutionException(facetName, "(value) field [" + valueField + "] not found");
}
IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueMapper);
return new ValueDateHistogramFacetExecutor(keyIndexFieldData, valueIndexFieldData, tzRounding, comparatorType);
return new ValueDateHistogramFacetExecutor(keyIndexFieldData, valueIndexFieldData, tzRounding, comparatorType, context.cacheRecycler());
} else {
return new CountDateHistogramFacetExecutor(keyIndexFieldData, tzRounding, comparatorType);
return new CountDateHistogramFacetExecutor(keyIndexFieldData, tzRounding, comparatorType, context.cacheRecycler());
}
}

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.datehistogram;
import gnu.trove.iterator.TLongLongIterator;
import gnu.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -108,24 +108,30 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
}
}
private ComparatorType comparatorType;
ComparatorType comparatorType;
TLongLongHashMap counts;
boolean cachedCounts;
CacheRecycler cacheRecycler;
CountEntry[] entries = null;
private InternalCountDateHistogramFacet() {
InternalCountDateHistogramFacet() {
}
public InternalCountDateHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, boolean cachedCounts) {
public InternalCountDateHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, CacheRecycler cacheRecycler) {
super(name);
this.comparatorType = comparatorType;
this.counts = counts;
this.cachedCounts = cachedCounts;
this.cacheRecycler = cacheRecycler;
}
public InternalCountDateHistogramFacet(String name, ComparatorType comparatorType, CountEntry[] entries) {
super(name);
this.comparatorType = comparatorType;
this.entries = entries;
}
@Override
public List<CountEntry> getEntries() {
return Arrays.asList(computeEntries());
return Arrays.asList(entries);
}
@Override
@ -134,46 +140,58 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
}
void releaseCache() {
if (cachedCounts) {
CacheRecycler.pushLongLongMap(counts);
cachedCounts = false;
if (cacheRecycler != null) {
cacheRecycler.pushLongLongMap(counts);
cacheRecycler = null;
counts = null;
}
}
private CountEntry[] computeEntries() {
if (entries != null) {
return entries;
@Override
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
InternalCountDateHistogramFacet histoFacet = (InternalCountDateHistogramFacet) facets.get(0);
if (histoFacet.entries == null) {
histoFacet.entries = new CountEntry[histoFacet.counts.size()];
int i = 0;
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext(); ) {
it.advance();
histoFacet.entries[i++] = new CountEntry(it.key(), it.value());
}
}
Arrays.sort(histoFacet.entries, histoFacet.comparatorType.comparator());
histoFacet.releaseCache();
return facets.get(0);
}
entries = new CountEntry[counts.size()];
TLongLongHashMap counts = context.cacheRecycler().popLongLongMap();
for (Facet facet : facets) {
InternalCountDateHistogramFacet histoFacet = (InternalCountDateHistogramFacet) facet;
if (histoFacet.entries != null) {
for (CountEntry entry : histoFacet.entries) {
counts.adjustOrPutValue(entry.getTime(), entry.getCount(), entry.getCount());
}
} else {
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext(); ) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
histoFacet.releaseCache();
}
CountEntry[] entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
entries[i++] = new CountEntry(it.key(), it.value());
}
releaseCache();
context.cacheRecycler().pushLongLongMap(counts);
Arrays.sort(entries, comparatorType.comparator());
return entries;
}
@Override
public Facet reduce(List<Facet> facets) {
if (facets.size() == 1) {
return facets.get(0);
}
TLongLongHashMap counts = CacheRecycler.popLongLongMap();
for (Facet facet : facets) {
InternalCountDateHistogramFacet histoFacet = (InternalCountDateHistogramFacet) facet;
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext(); ) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
}
histoFacet.releaseCache();
}
return new InternalCountDateHistogramFacet(getName(), comparatorType, counts, true);
return new InternalCountDateHistogramFacet(getName(), comparatorType, entries);
}
static final class Fields {
@ -188,7 +206,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
builder.startObject(getName());
builder.field(Fields._TYPE, TYPE);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
for (Entry entry : entries) {
builder.startObject();
builder.field(Fields.TIME, entry.getTime());
builder.field(Fields.COUNT, entry.getCount());
@ -211,11 +229,9 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
counts = CacheRecycler.popLongLongMap();
cachedCounts = true;
entries = new CountEntry[size];
for (int i = 0; i < size; i++) {
long key = in.readLong();
counts.put(key, in.readVLong());
entries[i] = new CountEntry(in.readLong(), in.readVLong());
}
}
@ -223,11 +239,19 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeByte(comparatorType.id());
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
if (entries != null) {
out.writeVInt(entries.length);
for (CountEntry entry : entries) {
out.writeLong(entry.getTime());
out.writeVLong(entry.getCount());
}
} else {
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
}
}
releaseCache();
}
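Both writeTo branches above emit byte-identical output, so the receiver cannot tell whether the sender had already materialized its entries, and readFrom always rebuilds plain CountEntry objects; pooled maps therefore never cross the wire. The stream layout and the matching reader, condensed:

    // Layout written by either branch:
    //   writeVInt(n)                            n = number of buckets
    //   n x { writeLong(key), writeVLong(count) }
    //
    // Reader sketch: deserialized facets hold CountEntry[] only, never a
    // pooled TLongLongHashMap.
    int n = in.readVInt();
    CountEntry[] read = new CountEntry[n];
    for (int i = 0; i < n; i++) {
        read[i] = new CountEntry(in.readLong(), in.readVLong());
    }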

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -119,7 +119,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
private ComparatorType comparatorType;
ExtTLongObjectHashMap<FullEntry> tEntries;
boolean cachedEntries;
CacheRecycler cacheRecycler;
Collection<FullEntry> entries;
InternalFullDateHistogramFacet() {
@ -129,11 +129,11 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
super(name);
}
public InternalFullDateHistogramFacet(String name, ComparatorType comparatorType, ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries, boolean cachedEntries) {
public InternalFullDateHistogramFacet(String name, ComparatorType comparatorType, ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries, CacheRecycler cacheRecycler) {
super(name);
this.comparatorType = comparatorType;
this.tEntries = entries;
this.cachedEntries = cachedEntries;
this.cacheRecycler = cacheRecycler;
this.entries = entries.valueCollection();
}
@ -151,15 +151,16 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
}
void releaseCache() {
if (cachedEntries) {
CacheRecycler.pushLongObjectMap(tEntries);
cachedEntries = false;
if (cacheRecycler != null) {
cacheRecycler.pushLongObjectMap(tEntries);
cacheRecycler = null;
tEntries = null;
}
}
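The cachedEntries boolean is gone: the nullable cacheRecycler reference now doubles as the not-yet-released flag, and nulling both fields makes releaseCache() idempotent. The same guard as above, annotated:

    void releaseCache() {
        if (cacheRecycler != null) {               // non-null means the pooled map is still owned
            cacheRecycler.pushLongObjectMap(tEntries);
            cacheRecycler = null;                  // a second call now falls through harmlessly
            tEntries = null;                       // nothing may touch the returned map
        }
    }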
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
// we need to sort it
InternalFullDateHistogramFacet internalFacet = (InternalFullDateHistogramFacet) facets.get(0);
@ -169,7 +170,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
return internalFacet;
}
ExtTLongObjectHashMap<FullEntry> map = CacheRecycler.popLongObjectMap();
ExtTLongObjectHashMap<FullEntry> map = context.cacheRecycler().popLongObjectMap();
for (Facet facet : facets) {
InternalFullDateHistogramFacet histoFacet = (InternalFullDateHistogramFacet) facet;
@ -204,7 +205,7 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
ordered.add(value);
}
CacheRecycler.pushLongObjectMap(map);
context.cacheRecycler().pushLongObjectMap(map);
// just initialize it as already ordered facet
InternalFullDateHistogramFacet ret = new InternalFullDateHistogramFacet(getName());
@ -256,8 +257,6 @@ public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
cachedEntries = false;
int size = in.readVInt();
entries = new ArrayList<FullEntry>(size);
for (int i = 0; i < size; i++) {

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
@ -38,6 +38,7 @@ import java.io.IOException;
*/
public class ValueDateHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final DateHistogramFacet.ComparatorType comparatorType;
@ -45,13 +46,14 @@ public class ValueDateHistogramFacetExecutor extends FacetExecutor {
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
public ValueDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType) {
public ValueDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) {
this.comparatorType = comparatorType;
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.tzRounding = tzRounding;
this.cacheRecycler = cacheRecycler;
this.entries = CacheRecycler.popLongObjectMap();
this.entries = cacheRecycler.popLongObjectMap();
}
@Override
@ -61,7 +63,7 @@ public class ValueDateHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullDateHistogramFacet(facetName, comparatorType, entries, true);
return new InternalFullDateHistogramFacet(facetName, comparatorType, entries, cacheRecycler);
}
class Collector extends FacetExecutor.Collector {

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -39,6 +39,7 @@ import java.io.IOException;
*/
public class ValueScriptDateHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData keyIndexFieldData;
private final DateHistogramFacet.ComparatorType comparatorType;
final SearchScript valueScript;
@ -46,13 +47,14 @@ public class ValueScriptDateHistogramFacetExecutor extends FacetExecutor {
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
public ValueScriptDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, SearchScript valueScript, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType) {
public ValueScriptDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, SearchScript valueScript, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) {
this.comparatorType = comparatorType;
this.keyIndexFieldData = keyIndexFieldData;
this.valueScript = valueScript;
this.tzRounding = tzRounding;
this.cacheRecycler = cacheRecycler;
this.entries = CacheRecycler.popLongObjectMap();
this.entries = cacheRecycler.popLongObjectMap();
}
@Override
@ -62,7 +64,7 @@ public class ValueScriptDateHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullDateHistogramFacet(facetName, comparatorType, entries, true);
return new InternalFullDateHistogramFacet(facetName, comparatorType, entries, cacheRecycler);
}
class Collector extends FacetExecutor.Collector {

View File

@ -78,7 +78,8 @@ public class InternalFilterFacet extends InternalFacet implements FilterFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}
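Every reduce(List&lt;Facet&gt;) override in this commit becomes reduce(ReduceContext), and the migrated bodies only ever call two members on the context. The interface shape those call sites imply, as a hypothetical reconstruction (the real class may carry more):

    import java.util.List;
    import org.elasticsearch.cache.recycler.CacheRecycler;

    // Reconstructed from usage in this diff only: facets() supplies the
    // shard results to merge, cacheRecycler() the pooled maps to merge into.
    public interface ReduceContext {
        List<Facet> facets();
        CacheRecycler cacheRecycler();
    }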

View File

@ -83,8 +83,8 @@ public class InternalGeoDistanceFacet extends InternalFacet implements GeoDistan
return getEntries().iterator();
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.histogram;
import gnu.trove.map.hash.TLongLongHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.DoubleFacetAggregatorBase;
@ -37,6 +37,7 @@ import java.io.IOException;
*/
public class CountHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
final long interval;
@ -47,8 +48,9 @@ public class CountHistogramFacetExecutor extends FacetExecutor {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.interval = interval;
this.cacheRecycler = context.cacheRecycler();
this.counts = CacheRecycler.popLongLongMap();
this.counts = cacheRecycler.popLongLongMap();
}
@Override
@ -58,7 +60,7 @@ public class CountHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalCountHistogramFacet(facetName, comparatorType, counts, true);
return new InternalCountHistogramFacet(facetName, comparatorType, counts, cacheRecycler);
}
public static long bucket(double value, long interval) {

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -37,6 +37,7 @@ import java.io.IOException;
*/
public class FullHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
final long interval;
@ -47,8 +48,9 @@ public class FullHistogramFacetExecutor extends FacetExecutor {
this.comparatorType = comparatorType;
this.indexFieldData = indexFieldData;
this.interval = interval;
this.cacheRecycler = context.cacheRecycler();
this.entries = CacheRecycler.popLongObjectMap();
this.entries = cacheRecycler.popLongObjectMap();
}
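Not every executor takes the recycler as a parameter: ones already handed a SearchContext, like this one, pull the same per-node instance off the context. Both wiring styles end in the identical pop:

    this.cacheRecycler = context.cacheRecycler();    // context-derived instead of constructor-injected
    this.entries = cacheRecycler.popLongObjectMap(); // same pooled map either way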
@Override
@ -58,7 +60,7 @@ public class FullHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
return new InternalFullHistogramFacet(facetName, comparatorType, entries, cacheRecycler);
}
public static long bucket(double value, long interval) {

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.histogram;
import gnu.trove.iterator.TLongLongIterator;
import gnu.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -110,22 +110,28 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
ComparatorType comparatorType;
TLongLongHashMap counts;
boolean cachedCounts;
CacheRecycler cacheRecycler;
CountEntry[] entries = null;
private InternalCountHistogramFacet() {
}
public InternalCountHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, boolean cachedCounts) {
public InternalCountHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, CacheRecycler cacheRecycler) {
super(name);
this.comparatorType = comparatorType;
this.counts = counts;
this.cachedCounts = cachedCounts;
this.cacheRecycler = cacheRecycler;
}
public InternalCountHistogramFacet(String name, ComparatorType comparatorType, CountEntry[] entries) {
super(name);
this.comparatorType = comparatorType;
this.entries = entries;
}
@Override
public List<CountEntry> getEntries() {
return Arrays.asList(computeEntries());
return Arrays.asList(entries);
}
@Override
@ -133,46 +139,59 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
return (Iterator) getEntries().iterator();
}
private CountEntry[] computeEntries() {
if (entries != null) {
return entries;
}
entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
entries[i++] = new CountEntry(it.key(), it.value());
}
releaseCache();
Arrays.sort(entries, comparatorType.comparator());
return entries;
}
void releaseCache() {
if (cachedCounts) {
CacheRecycler.pushLongLongMap(counts);
cachedCounts = false;
if (cacheRecycler != null) {
cacheRecycler.pushLongLongMap(counts);
cacheRecycler = null;
counts = null;
}
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
// need to sort here...
InternalCountHistogramFacet histoFacet = (InternalCountHistogramFacet) facets.get(0);
if (histoFacet.entries == null) {
histoFacet.entries = new CountEntry[histoFacet.counts.size()];
int i = 0;
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext(); ) {
it.advance();
histoFacet.entries[i++] = new CountEntry(it.key(), it.value());
}
}
Arrays.sort(histoFacet.entries, histoFacet.comparatorType.comparator());
histoFacet.releaseCache();
return facets.get(0);
}
TLongLongHashMap counts = CacheRecycler.popLongLongMap();
TLongLongHashMap counts = context.cacheRecycler().popLongLongMap();
for (Facet facet : facets) {
InternalCountHistogramFacet histoFacet = (InternalCountHistogramFacet) facet;
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext(); ) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
if (histoFacet.entries != null) {
for (Entry entry : histoFacet.entries) {
counts.adjustOrPutValue(entry.getKey(), entry.getCount(), entry.getCount());
}
} else {
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext(); ) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
histoFacet.releaseCache();
}
CountEntry[] entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
entries[i++] = new CountEntry(it.key(), it.value());
}
context.cacheRecycler().pushLongLongMap(counts);
return new InternalCountHistogramFacet(getName(), comparatorType, counts, true);
Arrays.sort(entries, comparatorType.comparator());
return new InternalCountHistogramFacet(getName(), comparatorType, entries);
}
static final class Fields {
@ -187,7 +206,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
builder.startObject(getName());
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
for (Entry entry : entries) {
builder.startObject();
builder.field(Fields.KEY, entry.getKey());
builder.field(Fields.COUNT, entry.getCount());
@ -208,13 +227,10 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
counts = CacheRecycler.popLongLongMap();
cachedCounts = true;
entries = new CountEntry[size];
for (int i = 0; i < size; i++) {
long key = in.readLong();
counts.put(key, in.readVLong());
entries[i] = new CountEntry(in.readLong(), in.readVLong());
}
}
@ -222,12 +238,20 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeByte(comparatorType.id());
// optimize the write, since we know we have the same buckets as keys
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
if (entries != null) {
out.writeVInt(entries.length);
for (CountEntry entry : entries) {
out.writeLong(entry.getKey());
out.writeVLong(entry.getCount());
}
} else {
// optimize the write, since we know we have the same buckets as keys
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext(); ) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
}
}
releaseCache();
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -116,7 +116,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
private ComparatorType comparatorType;
ExtTLongObjectHashMap<FullEntry> tEntries;
boolean cachedEntries;
CacheRecycler cacheRecycler;
Collection<FullEntry> entries;
InternalFullHistogramFacet() {
@ -126,11 +126,11 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
super(name);
}
public InternalFullHistogramFacet(String name, ComparatorType comparatorType, ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries, boolean cachedEntries) {
public InternalFullHistogramFacet(String name, ComparatorType comparatorType, ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries, CacheRecycler cacheRecycler) {
super(name);
this.comparatorType = comparatorType;
this.tEntries = entries;
this.cachedEntries = cachedEntries;
this.cacheRecycler = cacheRecycler;
this.entries = entries.valueCollection();
}
@ -148,15 +148,16 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
}
void releaseCache() {
if (cachedEntries) {
CacheRecycler.pushLongObjectMap(tEntries);
cachedEntries = false;
if (cacheRecycler != null) {
cacheRecycler.pushLongObjectMap(tEntries);
cacheRecycler = null;
tEntries = null;
}
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
// we need to sort it
InternalFullHistogramFacet internalFacet = (InternalFullHistogramFacet) facets.get(0);
@ -166,7 +167,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
return internalFacet;
}
ExtTLongObjectHashMap<FullEntry> map = CacheRecycler.popLongObjectMap();
ExtTLongObjectHashMap<FullEntry> map = context.cacheRecycler().popLongObjectMap();
for (Facet facet : facets) {
InternalFullHistogramFacet histoFacet = (InternalFullHistogramFacet) facet;
@ -201,7 +202,7 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
ordered.add(value);
}
CacheRecycler.pushLongObjectMap(map);
context.cacheRecycler().pushLongObjectMap(map);
// just initialize it as already ordered facet
InternalFullHistogramFacet ret = new InternalFullHistogramFacet(getName());
@ -253,8 +254,6 @@ public class InternalFullHistogramFacet extends InternalHistogramFacet {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
comparatorType = ComparatorType.fromId(in.readByte());
cachedEntries = false;
int size = in.readVInt();
entries = new ArrayList<FullEntry>(size);
for (int i = 0; i < size; i++) {

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
@ -36,6 +36,7 @@ import java.util.Map;
*/
public class ScriptHistogramFacetExecutor extends FacetExecutor {
final CacheRecycler cacheRecycler;
final SearchScript keyScript;
final SearchScript valueScript;
final long interval;
@ -48,8 +49,9 @@ public class ScriptHistogramFacetExecutor extends FacetExecutor {
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
this.interval = interval > 0 ? interval : 0;
this.comparatorType = comparatorType;
this.cacheRecycler = context.cacheRecycler();
this.entries = CacheRecycler.popLongObjectMap();
this.entries = cacheRecycler.popLongObjectMap();
}
@Override
@ -59,7 +61,7 @@ public class ScriptHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
return new InternalFullHistogramFacet(facetName, comparatorType, entries, cacheRecycler);
}
public static long bucket(double value, long interval) {

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -36,6 +36,7 @@ import java.io.IOException;
*/
public class ValueHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
@ -48,7 +49,8 @@ public class ValueHistogramFacetExecutor extends FacetExecutor {
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.interval = interval;
this.entries = CacheRecycler.popLongObjectMap();
this.cacheRecycler = context.cacheRecycler();
this.entries = cacheRecycler.popLongObjectMap();
}
@Override
@ -58,7 +60,7 @@ public class ValueHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
return new InternalFullHistogramFacet(facetName, comparatorType, entries, cacheRecycler);
}
class Collector extends FacetExecutor.Collector {
@ -110,7 +112,7 @@ public class ValueHistogramFacetExecutor extends FacetExecutor {
}
entry.count++;
valueAggregator.entry = entry;
valueAggregator.onDoc(docId, valueValues);
valueAggregator.onDoc(docId, valueValues);
}
public final static class ValueAggregator extends DoubleFacetAggregatorBase {

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -40,6 +40,7 @@ import java.util.Map;
*/
public class ValueScriptHistogramFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData indexFieldData;
private final HistogramFacet.ComparatorType comparatorType;
final SearchScript valueScript;
@ -52,8 +53,9 @@ public class ValueScriptHistogramFacetExecutor extends FacetExecutor {
this.indexFieldData = indexFieldData;
this.interval = interval;
this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
this.cacheRecycler = context.cacheRecycler();
this.entries = CacheRecycler.popLongObjectMap();
this.entries = cacheRecycler.popLongObjectMap();
}
@Override
@ -63,7 +65,7 @@ public class ValueScriptHistogramFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
return new InternalFullHistogramFacet(facetName, comparatorType, entries, true);
return new InternalFullHistogramFacet(facetName, comparatorType, entries, cacheRecycler);
}
public static long bucket(double value, long interval) {

View File

@ -76,7 +76,8 @@ public class InternalQueryFacet extends InternalFacet implements QueryFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}

View File

@ -84,7 +84,8 @@ public class InternalRangeFacet extends InternalFacet implements RangeFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}

View File

@ -120,7 +120,8 @@ public class InternalStatisticalFacet extends InternalFacet implements Statistic
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}

View File

@ -178,7 +178,7 @@ public class TermsFacetParser extends AbstractComponent implements FacetParser {
return new FieldsTermsStringFacetExecutor(facetName, mappers.toArray(new FieldMapper[mappers.size()]), size, comparatorType, allTerms, context, excluded, pattern, searchScript);
}
if (field == null && fieldsNames == null && script != null) {
return new ScriptTermsStringFieldFacetExecutor(size, comparatorType, context, excluded, pattern, scriptLang, script, params);
return new ScriptTermsStringFieldFacetExecutor(size, comparatorType, context, excluded, pattern, scriptLang, script, params, context.cacheRecycler());
}
FieldMapper fieldMapper = context.smartNameFieldMapper(field);
@ -190,9 +190,9 @@ public class TermsFacetParser extends AbstractComponent implements FacetParser {
if (indexFieldData instanceof IndexNumericFieldData) {
IndexNumericFieldData indexNumericFieldData = (IndexNumericFieldData) indexFieldData;
if (indexNumericFieldData.getNumericType().isFloatingPoint()) {
return new TermsDoubleFacetExecutor(indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript);
return new TermsDoubleFacetExecutor(indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript, context.cacheRecycler());
} else {
return new TermsLongFacetExecutor(indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript);
return new TermsLongFacetExecutor(indexNumericFieldData, size, comparatorType, allTerms, context, excluded, searchScript, context.cacheRecycler());
}
} else {
if (script != null || "map".equals(executionHint)) {

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.facet.terms.doubles;
import com.google.common.collect.ImmutableList;
import gnu.trove.iterator.TDoubleIntIterator;
import gnu.trove.map.hash.TDoubleIntHashMap;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -159,14 +158,15 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}
InternalDoubleTermsFacet first = null;
TDoubleIntHashMap aggregated = CacheRecycler.popDoubleIntMap();
TDoubleIntHashMap aggregated = context.cacheRecycler().popDoubleIntMap();
long missing = 0;
long total = 0;
for (Facet facet : facets) {
@ -178,7 +178,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
missing += termsFacet.getMissingCount();
total += termsFacet.getTotalCount();
for (Entry entry : termsFacet.getEntries()) {
aggregated.adjustOrPutValue(((DoubleEntry)entry).term, entry.getCount(), entry.getCount());
aggregated.adjustOrPutValue(((DoubleEntry) entry).term, entry.getCount(), entry.getCount());
}
}
@ -191,7 +191,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
first.missing = missing;
first.total = total;
CacheRecycler.pushDoubleIntMap(aggregated);
context.cacheRecycler().pushDoubleIntMap(aggregated);
return first;
}

View File

@ -27,7 +27,7 @@ import gnu.trove.set.hash.TDoubleHashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -49,6 +49,7 @@ import java.util.Set;
*/
public class TermsDoubleFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData indexFieldData;
private final TermsFacet.ComparatorType comparatorType;
private final int size;
@ -60,14 +61,15 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
long total;
public TermsDoubleFacetExecutor(IndexNumericFieldData indexFieldData, int size, TermsFacet.ComparatorType comparatorType, boolean allTerms, SearchContext context,
ImmutableSet<BytesRef> excluded, SearchScript script) {
ImmutableSet<BytesRef> excluded, SearchScript script, CacheRecycler cacheRecycler) {
this.indexFieldData = indexFieldData;
this.size = size;
this.comparatorType = comparatorType;
this.script = script;
this.excluded = excluded;
this.cacheRecycler = cacheRecycler;
this.facets = CacheRecycler.popDoubleIntMap();
this.facets = cacheRecycler.popDoubleIntMap();
if (allTerms) {
for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
@ -115,7 +117,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
if (facets.isEmpty()) {
CacheRecycler.pushDoubleIntMap(facets);
cacheRecycler.pushDoubleIntMap(facets);
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalDoubleTermsFacet.DoubleEntry>of(), missing, total);
} else {
if (size < EntryPriorityQueue.LIMIT) {
@ -128,7 +130,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
for (int i = ordered.size() - 1; i >= 0; i--) {
list[i] = (InternalDoubleTermsFacet.DoubleEntry) ordered.pop();
}
CacheRecycler.pushDoubleIntMap(facets);
cacheRecycler.pushDoubleIntMap(facets);
return new InternalDoubleTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total);
} else {
BoundedTreeSet<InternalDoubleTermsFacet.DoubleEntry> ordered = new BoundedTreeSet<InternalDoubleTermsFacet.DoubleEntry>(comparatorType.comparator(), size);
@ -136,7 +138,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
it.advance();
ordered.add(new InternalDoubleTermsFacet.DoubleEntry(it.key(), it.value()));
}
CacheRecycler.pushDoubleIntMap(facets);
cacheRecycler.pushDoubleIntMap(facets);
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ordered, missing, total);
}
}
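All three buildFacet() branches above (empty result, priority queue, bounded tree set) push the pooled facets map back exactly once before the immutable facet escapes, balancing the single pop in the constructor. A sketch of the same contract stated defensively; this is not the committed code, which pushes per branch:

    TDoubleIntHashMap facets = cacheRecycler.popDoubleIntMap();
    try {
        // ... aggregate hits, then copy the survivors into an immutable list ...
    } finally {
        cacheRecycler.pushDoubleIntMap(facets); // push-once, whatever branch built the result
    }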

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.facet.terms.longs;
import com.google.common.collect.ImmutableList;
import gnu.trove.iterator.TLongIntIterator;
import gnu.trove.map.hash.TLongIntHashMap;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -160,14 +159,15 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}
InternalLongTermsFacet first = null;
TLongIntHashMap aggregated = CacheRecycler.popLongIntMap();
TLongIntHashMap aggregated = context.cacheRecycler().popLongIntMap();
long missing = 0;
long total = 0;
for (Facet facet : facets) {
@ -192,7 +192,7 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
first.missing = missing;
first.total = total;
CacheRecycler.pushLongIntMap(aggregated);
context.cacheRecycler().pushLongIntMap(aggregated);
return first;
}

View File

@ -27,7 +27,7 @@ import gnu.trove.set.hash.TLongHashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.LongValues;
@ -49,6 +49,7 @@ import java.util.Set;
*/
public class TermsLongFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final IndexNumericFieldData indexFieldData;
private final TermsFacet.ComparatorType comparatorType;
private final int size;
@ -60,13 +61,14 @@ public class TermsLongFacetExecutor extends FacetExecutor {
long total;
public TermsLongFacetExecutor(IndexNumericFieldData indexFieldData, int size, TermsFacet.ComparatorType comparatorType, boolean allTerms, SearchContext context,
ImmutableSet<BytesRef> excluded, SearchScript script) {
ImmutableSet<BytesRef> excluded, SearchScript script, CacheRecycler cacheRecycler) {
this.indexFieldData = indexFieldData;
this.size = size;
this.comparatorType = comparatorType;
this.script = script;
this.excluded = excluded;
this.facets = CacheRecycler.popLongIntMap();
this.cacheRecycler = cacheRecycler;
this.facets = cacheRecycler.popLongIntMap();
if (allTerms) {
for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
@ -114,7 +116,7 @@ public class TermsLongFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
if (facets.isEmpty()) {
CacheRecycler.pushLongIntMap(facets);
cacheRecycler.pushLongIntMap(facets);
return new InternalLongTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalLongTermsFacet.LongEntry>of(), missing, total);
} else {
if (size < EntryPriorityQueue.LIMIT) {
@ -127,7 +129,7 @@ public class TermsLongFacetExecutor extends FacetExecutor {
for (int i = ordered.size() - 1; i >= 0; i--) {
list[i] = (InternalLongTermsFacet.LongEntry) ordered.pop();
}
CacheRecycler.pushLongIntMap(facets);
cacheRecycler.pushLongIntMap(facets);
return new InternalLongTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total);
} else {
BoundedTreeSet<InternalLongTermsFacet.LongEntry> ordered = new BoundedTreeSet<InternalLongTermsFacet.LongEntry>(comparatorType.comparator(), size);
@ -135,7 +137,7 @@ public class TermsLongFacetExecutor extends FacetExecutor {
it.advance();
ordered.add(new InternalLongTermsFacet.LongEntry(it.key(), it.value()));
}
CacheRecycler.pushLongIntMap(facets);
cacheRecycler.pushLongIntMap(facets);
return new InternalLongTermsFacet(facetName, comparatorType, size, ordered, missing, total);
}
}

View File

@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableList;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.hash.TObjectIntHashMap;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -169,14 +168,15 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
return facets.get(0);
}
InternalStringTermsFacet first = null;
TObjectIntHashMap<Text> aggregated = CacheRecycler.popObjectIntMap();
TObjectIntHashMap<Text> aggregated = context.cacheRecycler().popObjectIntMap();
long missing = 0;
long total = 0;
for (Facet facet : facets) {
@ -188,7 +188,7 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
// the assumption is that if one of the facets is of different type, it should do the
// reduction (all the facets we iterated so far most likely represent unmapped fields, if not
// class cast exception will be thrown)
return termsFacet.reduce(facets);
return termsFacet.reduce(context);
}
if (first == null) {
@ -209,7 +209,7 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
first.missing = missing;
first.total = total;
CacheRecycler.pushObjectIntMap(aggregated);
context.cacheRecycler().pushObjectIntMap(aggregated);
return first;
}

View File

@ -26,7 +26,7 @@ import gnu.trove.map.hash.TObjectIntHashMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
@ -45,6 +45,7 @@ import java.util.regex.Pattern;
*/
public class ScriptTermsStringFieldFacetExecutor extends FacetExecutor {
private final CacheRecycler cacheRecycler;
private final InternalStringTermsFacet.ComparatorType comparatorType;
private final int size;
private final SearchScript script;
@ -57,16 +58,18 @@ public class ScriptTermsStringFieldFacetExecutor extends FacetExecutor {
long total;
public ScriptTermsStringFieldFacetExecutor(int size, InternalStringTermsFacet.ComparatorType comparatorType, SearchContext context,
ImmutableSet<BytesRef> excluded, Pattern pattern, String scriptLang, String script, Map<String, Object> params) {
ImmutableSet<BytesRef> excluded, Pattern pattern, String scriptLang, String script, Map<String, Object> params,
CacheRecycler cacheRecycler) {
this.size = size;
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.script = context.scriptService().search(context.lookup(), scriptLang, script, params);
this.cacheRecycler = cacheRecycler;
this.excluded = excluded;
this.matcher = pattern != null ? pattern.matcher("") : null;
this.facets = CacheRecycler.popObjectIntMap();
this.facets = cacheRecycler.popObjectIntMap();
}
@Override
@ -77,7 +80,7 @@ public class ScriptTermsStringFieldFacetExecutor extends FacetExecutor {
@Override
public InternalFacet buildFacet(String facetName) {
if (facets.isEmpty()) {
CacheRecycler.pushObjectIntMap(facets);
cacheRecycler.pushObjectIntMap(facets);
return new InternalStringTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalStringTermsFacet.TermEntry>of(), missing, total);
} else {
if (size < EntryPriorityQueue.LIMIT) {
@ -90,7 +93,7 @@ public class ScriptTermsStringFieldFacetExecutor extends FacetExecutor {
for (int i = ordered.size() - 1; i >= 0; i--) {
list[i] = ((InternalStringTermsFacet.TermEntry) ordered.pop());
}
CacheRecycler.pushObjectIntMap(facets);
cacheRecycler.pushObjectIntMap(facets);
return new InternalStringTermsFacet(facetName, comparatorType, size, Arrays.asList(list), missing, total);
} else {
BoundedTreeSet<InternalStringTermsFacet.TermEntry> ordered = new BoundedTreeSet<InternalStringTermsFacet.TermEntry>(comparatorType.comparator(), size);
@ -98,7 +101,7 @@ public class ScriptTermsStringFieldFacetExecutor extends FacetExecutor {
it.advance();
ordered.add(new InternalStringTermsFacet.TermEntry(it.key(), it.value()));
}
CacheRecycler.pushObjectIntMap(facets);
cacheRecycler.pushObjectIntMap(facets);
return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, missing, total);
}
}

View File

@ -25,7 +25,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
@ -51,6 +51,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
private final IndexFieldData.WithOrdinals indexFieldData;
final CacheRecycler cacheRecycler;
private final TermsFacet.ComparatorType comparatorType;
private final int size;
private final int minCount;
@ -83,6 +84,8 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
minCount = 0;
}
this.cacheRecycler = context.cacheRecycler();
this.aggregators = new ArrayList<ReaderAggregator>(context.searcher().getIndexReader().leaves().size());
}
@ -143,7 +146,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
for (ReaderAggregator aggregator : aggregators) {
if (aggregator.counts.length > ordinalsCacheAbove) {
CacheRecycler.pushIntArray(aggregator.counts);
cacheRecycler.pushIntArray(aggregator.counts);
}
}
@ -186,7 +189,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
for (ReaderAggregator aggregator : aggregators) {
if (aggregator.counts.length > ordinalsCacheAbove) {
CacheRecycler.pushIntArray(aggregator.counts);
cacheRecycler.pushIntArray(aggregator.counts);
}
}
@ -211,7 +214,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
}
}
values = indexFieldData.load(context).getBytesValues();
current = new ReaderAggregator(values, ordinalsCacheAbove);
current = new ReaderAggregator(values, ordinalsCacheAbove, cacheRecycler);
ordinals = values.ordinals();
}
@ -220,7 +223,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
Iter iter = ordinals.getIter(doc);
int ord = iter.next();
current.onOrdinal(doc, ord);
while((ord = iter.next()) != 0) {
while ((ord = iter.next()) != 0) {
current.onOrdinal(doc, ord);
}
}
@ -251,12 +254,12 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
int total;
private final int maxOrd;
public ReaderAggregator(BytesValues.WithOrdinals values, int ordinalsCacheLimit) {
public ReaderAggregator(BytesValues.WithOrdinals values, int ordinalsCacheLimit, CacheRecycler cacheRecycler) {
this.values = values;
this.maxOrd = values.ordinals().getMaxOrd();
if (maxOrd > ordinalsCacheLimit) {
this.counts = CacheRecycler.popIntArray(maxOrd);
this.counts = cacheRecycler.popIntArray(maxOrd);
} else {
this.counts = new int[maxOrd];
}
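ReaderAggregator only borrows from the pool when the ordinal space is large; below ordinalsCacheLimit a fresh array is cheaper than recycling. The guard, isolated:

    // Arrays above the limit come from the pool and must be returned via
    // pushIntArray (as the executor does above); small ones go to the GC.
    int[] counts = maxOrd > ordinalsCacheLimit
            ? cacheRecycler.popIntArray(maxOrd)
            : new int[maxOrd];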

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.facet.termsstats.doubles;
import org.apache.lucene.util.CollectionUtil;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -170,7 +169,8 @@ public class InternalTermsStatsDoubleFacet extends InternalTermsStatsFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
if (requiredSize == 0) {
// we need to sort it here!
@ -183,7 +183,7 @@ public class InternalTermsStatsDoubleFacet extends InternalTermsStatsFacet {
return facets.get(0);
}
int missing = 0;
ExtTDoubleObjectHashMap<DoubleEntry> map = CacheRecycler.popDoubleObjectMap();
ExtTDoubleObjectHashMap<DoubleEntry> map = context.cacheRecycler().popDoubleObjectMap();
for (Facet facet : facets) {
InternalTermsStatsDoubleFacet tsFacet = (InternalTermsStatsDoubleFacet) facet;
missing += tsFacet.missing;
@ -210,7 +210,7 @@ public class InternalTermsStatsDoubleFacet extends InternalTermsStatsFacet {
if (requiredSize == 0) { // all terms
DoubleEntry[] entries1 = map.values(new DoubleEntry[map.size()]);
Arrays.sort(entries1, comparatorType.comparator());
CacheRecycler.pushDoubleObjectMap(map);
context.cacheRecycler().pushDoubleObjectMap(map);
return new InternalTermsStatsDoubleFacet(getName(), comparatorType, requiredSize, Arrays.asList(entries1), missing);
} else {
Object[] values = map.internalValues();
@ -223,7 +223,7 @@ public class InternalTermsStatsDoubleFacet extends InternalTermsStatsFacet {
}
ordered.add(value);
}
CacheRecycler.pushDoubleObjectMap(map);
context.cacheRecycler().pushDoubleObjectMap(map);
return new InternalTermsStatsDoubleFacet(getName(), comparatorType, requiredSize, ordered, missing);
}
}

View File

@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.trove.ExtTDoubleObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -43,6 +43,7 @@ public class TermsStatsDoubleFacetExecutor extends FacetExecutor {
private final TermsStatsFacet.ComparatorType comparatorType;
final CacheRecycler cacheRecycler;
final IndexNumericFieldData keyIndexFieldData;
final IndexNumericFieldData valueIndexFieldData;
final SearchScript script;
@ -59,8 +60,9 @@ public class TermsStatsDoubleFacetExecutor extends FacetExecutor {
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.script = script;
this.cacheRecycler = context.cacheRecycler();
this.entries = CacheRecycler.popDoubleObjectMap();
this.entries = cacheRecycler.popDoubleObjectMap();
}
@Override
@ -90,7 +92,7 @@ public class TermsStatsDoubleFacetExecutor extends FacetExecutor {
ordered.add(value);
}
CacheRecycler.pushDoubleObjectMap(entries);
cacheRecycler.pushDoubleObjectMap(entries);
return new InternalTermsStatsDoubleFacet(facetName, comparatorType, size, ordered, missing);
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.facet.termsstats.longs;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
@ -169,7 +168,8 @@ public class InternalTermsStatsLongFacet extends InternalTermsStatsFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
if (requiredSize == 0) {
// we need to sort it here!
@ -182,7 +182,7 @@ public class InternalTermsStatsLongFacet extends InternalTermsStatsFacet {
return facets.get(0);
}
int missing = 0;
ExtTLongObjectHashMap<LongEntry> map = CacheRecycler.popLongObjectMap();
ExtTLongObjectHashMap<LongEntry> map = context.cacheRecycler().popLongObjectMap();
for (Facet facet : facets) {
InternalTermsStatsLongFacet tsFacet = (InternalTermsStatsLongFacet) facet;
missing += tsFacet.missing;
@ -209,7 +209,7 @@ public class InternalTermsStatsLongFacet extends InternalTermsStatsFacet {
if (requiredSize == 0) { // all terms
LongEntry[] entries1 = map.values(new LongEntry[map.size()]);
Arrays.sort(entries1, comparatorType.comparator());
CacheRecycler.pushLongObjectMap(map);
context.cacheRecycler().pushLongObjectMap(map);
return new InternalTermsStatsLongFacet(getName(), comparatorType, requiredSize, Arrays.asList(entries1), missing);
} else {
Object[] values = map.internalValues();
@ -222,7 +222,7 @@ public class InternalTermsStatsLongFacet extends InternalTermsStatsFacet {
}
ordered.add(value);
}
CacheRecycler.pushLongObjectMap(map);
context.cacheRecycler().pushLongObjectMap(map);
return new InternalTermsStatsLongFacet(getName(), comparatorType, requiredSize, ordered, missing);
}
}

View File

@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -44,6 +44,7 @@ import java.util.List;
public class TermsStatsLongFacetExecutor extends FacetExecutor {
private final TermsStatsFacet.ComparatorType comparatorType;
final CacheRecycler cacheRecycler;
final IndexNumericFieldData keyIndexFieldData;
final IndexNumericFieldData valueIndexFieldData;
final SearchScript script;
@ -60,8 +61,9 @@ public class TermsStatsLongFacetExecutor extends FacetExecutor {
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.script = script;
this.cacheRecycler = context.cacheRecycler();
this.entries = CacheRecycler.popLongObjectMap();
this.entries = cacheRecycler.popLongObjectMap();
}
@Override
@ -92,7 +94,7 @@ public class TermsStatsLongFacetExecutor extends FacetExecutor {
}
ordered.add(value);
}
CacheRecycler.pushLongObjectMap(entries);
cacheRecycler.pushLongObjectMap(entries);
return new InternalTermsStatsLongFacet(facetName, comparatorType, size, ordered, missing);
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.facet.termsstats.strings;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -174,7 +173,8 @@ public class InternalTermsStatsStringFacet extends InternalTermsStatsFacet {
}
@Override
public Facet reduce(List<Facet> facets) {
public Facet reduce(ReduceContext context) {
List<Facet> facets = context.facets();
if (facets.size() == 1) {
if (requiredSize == 0) {
// we need to sort it here!
@ -187,7 +187,7 @@ public class InternalTermsStatsStringFacet extends InternalTermsStatsFacet {
return facets.get(0);
}
int missing = 0;
ExtTHashMap<Text, StringEntry> map = CacheRecycler.popHashMap();
ExtTHashMap<Text, StringEntry> map = context.cacheRecycler().popHashMap();
for (Facet facet : facets) {
InternalTermsStatsStringFacet tsFacet = (InternalTermsStatsStringFacet) facet;
missing += tsFacet.missing;
@ -214,7 +214,7 @@ public class InternalTermsStatsStringFacet extends InternalTermsStatsFacet {
if (requiredSize == 0) { // all terms
StringEntry[] entries1 = map.values().toArray(new StringEntry[map.size()]);
Arrays.sort(entries1, comparatorType.comparator());
CacheRecycler.pushHashMap(map);
context.cacheRecycler().pushHashMap(map);
return new InternalTermsStatsStringFacet(getName(), comparatorType, requiredSize, Arrays.asList(entries1), missing);
} else {
Object[] values = map.internalValues();
@ -227,7 +227,7 @@ public class InternalTermsStatsStringFacet extends InternalTermsStatsFacet {
}
ordered.add(value);
}
CacheRecycler.pushHashMap(map);
context.cacheRecycler().pushHashMap(map);
return new InternalTermsStatsStringFacet(getName(), comparatorType, requiredSize, ordered, missing);
}
}

View File

@ -24,7 +24,7 @@ import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.trove.ExtTHashMap;
import org.elasticsearch.index.fielddata.BytesValues;
@ -47,6 +47,7 @@ import java.util.List;
public class TermsStatsStringFacetExecutor extends FacetExecutor {
private final TermsStatsFacet.ComparatorType comparatorType;
final CacheRecycler cacheRecycler;
final IndexFieldData keyIndexFieldData;
final IndexNumericFieldData valueIndexFieldData;
final SearchScript script;
@ -62,7 +63,9 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
this.script = valueScript;
this.size = size;
this.comparatorType = comparatorType;
this.entries = CacheRecycler.popHashMap();
this.cacheRecycler = context.cacheRecycler();
this.entries = cacheRecycler.popHashMap();
}
@Override
@ -92,7 +95,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
ordered.add(value);
}
CacheRecycler.pushHashMap(entries); // fine to push here, we are done with it
cacheRecycler.pushHashMap(entries); // fine to push here, we are done with it
return new InternalTermsStatsStringFacet(facetName, comparatorType, size, ordered, missing);
}

View File

@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.search.AndFilter;
@ -107,6 +108,8 @@ public class SearchContext implements Releasable {
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final IndexShard indexShard;
private final IndexService indexService;
@ -184,13 +187,15 @@ public class SearchContext implements Releasable {
public SearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard, ScriptService scriptService) {
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
ScriptService scriptService, CacheRecycler cacheRecycler) {
this.id = id;
this.request = request;
this.searchType = request.searchType();
this.shardTarget = shardTarget;
this.engineSearcher = engineSearcher;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.dfsResult = new DfsSearchResult(id, shardTarget);
this.queryResult = new QuerySearchResult(id, shardTarget);
this.fetchResult = new FetchSearchResult(id, shardTarget);
@ -393,6 +398,10 @@ public class SearchContext implements Releasable {
return scriptService;
}
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
public FilterCache filterCache() {
return indexService.cache().filter();
}
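
With cacheRecycler() exposed on SearchContext alongside scriptService() and filterCache(), any request-scoped code can borrow scratch structures through the context. A small usage sketch; the try/finally is a defensive choice, not a pattern mandated by this commit:

import org.elasticsearch.common.trove.ExtTHashMap;
import org.elasticsearch.search.internal.SearchContext;

class ContextConsumerSketch {
    void aggregate(SearchContext context) {
        ExtTHashMap<String, Object> scratch = context.cacheRecycler().popHashMap();
        try {
            // ... use scratch for per-request aggregation ...
        } finally {
            context.cacheRecycler().pushHashMap(scratch); // always return it
        }
    }
}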

@ -19,6 +19,7 @@
package org.elasticsearch.test.unit.index.aliases;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.AbstractModule;
@ -65,6 +66,7 @@ public class IndexAliasesServiceTests {
public static IndexQueryParserService newIndexQueryParserService() {
Injector injector = new ModulesBuilder().add(
new IndicesQueriesModule(),
new CacheRecyclerModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new CodecModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
new IndexSettingsModule(new Index("test"), ImmutableSettings.Builder.EMPTY_SETTINGS),
new IndexNameModule(new Index("test")),
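
Tests that previously leaned on the static recycler now have to install CacheRecyclerModule in their injector; the remaining test hunks below repeat this same wiring, so one sketch covers them all. It assumes the module binds CacheRecycler so the instance is retrievable by type, and pairs it with SettingsModule in case the recycler's constructor needs injected Settings:

import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.SettingsModule;

class InjectorWiringSketch {
    static CacheRecycler newCacheRecycler() {
        Injector injector = new ModulesBuilder().add(
                new SettingsModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
                new CacheRecyclerModule(ImmutableSettings.Builder.EMPTY_SETTINGS)
        ).createInjector();
        return injector.getInstance(CacheRecycler.class); // assumes a by-type binding
    }
}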

@ -25,6 +25,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.AbstractModule;
@ -81,6 +82,7 @@ public class PercolatorExecutorTests {
.build();
Index index = new Index("test");
injector = new ModulesBuilder().add(
new CacheRecyclerModule(settings),
new IndexSettingsModule(index, settings),
new CodecModule(settings),
new SettingsModule(settings),

@ -31,6 +31,7 @@ import org.apache.lucene.search.spans.*;
import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
@ -102,6 +103,7 @@ public class SimpleIndexQueryParserTests {
.build();
Index index = new Index("test");
injector = new ModulesBuilder().add(
new CacheRecyclerModule(settings),
new CodecModule(settings),
new SettingsModule(settings),
new ThreadPoolModule(settings),

@ -19,6 +19,7 @@
package org.elasticsearch.test.unit.index.query.guice;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
@ -64,6 +65,7 @@ public class IndexQueryParserModuleTests {
Index index = new Index("test");
Injector injector = new ModulesBuilder().add(
new SettingsModule(settings),
new CacheRecyclerModule(settings),
new CodecModule(settings),
new ThreadPoolModule(settings),
new IndicesQueriesModule(),

@ -19,6 +19,7 @@
package org.elasticsearch.test.unit.index.query.plugin;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
@ -62,6 +63,7 @@ public class IndexQueryParserPlugin2Tests {
Index index = new Index("test");
Injector injector = new ModulesBuilder().add(
new CodecModule(settings),
new CacheRecyclerModule(settings),
new SettingsModule(settings),
new ThreadPoolModule(settings),
new IndicesQueriesModule(),

@ -19,6 +19,7 @@
package org.elasticsearch.test.unit.index.query.plugin;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
@ -71,6 +72,7 @@ public class IndexQueryParserPluginTests {
Index index = new Index("test");
Injector injector = new ModulesBuilder().add(
new SettingsModule(settings),
new CacheRecyclerModule(settings),
new ThreadPoolModule(settings),
new IndicesQueriesModule(),
new ScriptModule(settings),
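
This excerpt never shows CacheRecyclerModule's body. A hypothetical minimal form, consistent with how the tests construct it and with the recycler becoming a node/client-level singleton; the real module may do more:

import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;

// Hypothetical sketch only -- the commit diff does not include this class.
public class CacheRecyclerModuleSketch extends AbstractModule {
    private final Settings settings; // kept for parity with the real constructor

    public CacheRecyclerModuleSketch(Settings settings) {
        this.settings = settings;
    }

    @Override
    protected void configure() {
        // one recycler per node/client, replacing the old static singleton
        bind(CacheRecycler.class).asEagerSingleton();
    }
}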