Internal: Remove CacheRecycler.

The main consumer of this API was the faceting module. Now that it's gone,
let's remove CacheRecycler as well.
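The change is mechanical: injected CacheRecycler constructor parameters go away,
and call sites that borrowed pooled maps allocate plain HPPC maps instead. A
condensed sketch of the pattern repeated across the files below (not a verbatim
excerpt of any one file; HPPC assumed on the classpath, class name hypothetical):

    import com.carrotsearch.hppc.ObjectObjectOpenHashMap;

    public class MigrationSketch {
        public static void main(String[] args) {
            // before: borrow a pooled map and release it when done
            //   Recycler.V<ObjectObjectOpenHashMap<Object, Object>> v = cacheRecycler.hashMap(-1);
            //   try { v.v().put("k", "v"); } finally { Releasables.close(v); }
            // after: allocate directly; garbage collection replaces the explicit release
            ObjectObjectOpenHashMap<Object, Object> map = new ObjectObjectOpenHashMap<>();
            map.put("k", "v");
        }
    }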

Close #7366
Adrien Grand 2014-08-21 10:56:35 +02:00
parent 269a6dfb40
commit b5b1960a2b
28 changed files with 48 additions and 513 deletions

View File

@@ -26,7 +26,6 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@@ -66,18 +65,15 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@Inject
public TransportValidateQueryAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ScriptService scriptService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters) {
public TransportValidateQueryAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters) {
super(settings, ValidateQueryAction.NAME, threadPool, clusterService, transportService, actionFilters);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
}
@@ -182,7 +178,7 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
new ShardSearchRequest(request).types(request.types()).nowInMillis(request.nowInMillis())
.filteringAliases(request.filteringAliases()),
null, indexShard.acquireSearcher("validate_query"), indexService, indexShard,
scriptService, cacheRecycler, pageCacheRecycler, bigArrays
scriptService, pageCacheRecycler, bigArrays
);
SearchContext.setCurrent(searchContext);
try {

View File

@@ -26,7 +26,6 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@@ -69,20 +68,17 @@ public class TransportCountAction extends TransportBroadcastOperationAction<Coun
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@Inject
public TransportCountAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ScriptService scriptService, CacheRecycler cacheRecycler,
PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters) {
IndicesService indicesService, ScriptService scriptService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, ActionFilters actionFilters) {
super(settings, CountAction.NAME, threadPool, clusterService, transportService, actionFilters);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
}
@@ -174,7 +170,7 @@ public class TransportCountAction extends TransportBroadcastOperationAction<Coun
.filteringAliases(request.filteringAliases())
.nowInMillis(request.nowInMillis()),
shardTarget, indexShard.acquireSearcher("count"), indexService, indexShard,
scriptService, cacheRecycler, pageCacheRecycler, bigArrays);
scriptService, pageCacheRecycler, bigArrays);
SearchContext.setCurrent(context);
try {

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.search.Filter;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@@ -56,18 +55,16 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
private static final String ACTION_NAME = DeleteByQueryAction.NAME + "[s]";
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@Inject
public TransportShardDeleteByQueryAction(Settings settings, TransportService transportService,
ClusterService clusterService, IndicesService indicesService, ThreadPool threadPool,
ShardStateAction shardStateAction, ScriptService scriptService, CacheRecycler cacheRecycler,
ShardStateAction shardStateAction, ScriptService scriptService,
PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters) {
super(settings, ACTION_NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, actionFilters);
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
}
@@ -109,7 +106,7 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId.id());
SearchContext.setCurrent(new DefaultSearchContext(0, new ShardSearchRequest(shardRequest.request).types(request.types()).nowInMillis(request.nowInMillis()), null,
indexShard.acquireSearcher(DELETE_BY_QUERY_API), indexService, indexShard, scriptService, cacheRecycler,
indexShard.acquireSearcher(DELETE_BY_QUERY_API), indexService, indexShard, scriptService,
pageCacheRecycler, bigArrays));
try {
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.source(), request.filteringAliases(), Engine.Operation.Origin.PRIMARY, request.types());
@@ -132,7 +129,7 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
SearchContext.setCurrent(new DefaultSearchContext(0, new ShardSearchRequest(shardRequest).types(request.types()).nowInMillis(request.nowInMillis()), null,
indexShard.acquireSearcher(DELETE_BY_QUERY_API, IndexShard.Mode.WRITE), indexService, indexShard, scriptService,
cacheRecycler, pageCacheRecycler, bigArrays));
pageCacheRecycler, bigArrays));
try {
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.source(), request.filteringAliases(), Engine.Operation.Origin.REPLICA, request.types());
SearchContext.current().parsedQuery(new ParsedQuery(deleteByQuery.query(), ImmutableMap.<String, Filter>of()));

View File

@@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@@ -69,20 +68,17 @@ public class TransportExistsAction extends TransportBroadcastOperationAction<Exi
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@Inject
public TransportExistsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ScriptService scriptService, CacheRecycler cacheRecycler,
IndicesService indicesService, ScriptService scriptService,
PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters) {
super(settings, ExistsAction.NAME, threadPool, clusterService, transportService, actionFilters);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
}
@@ -175,7 +171,7 @@ public class TransportExistsAction extends TransportBroadcastOperationAction<Exi
.filteringAliases(request.filteringAliases())
.nowInMillis(request.nowInMillis()),
shardTarget, indexShard.acquireSearcher("exists"), indexService, indexShard,
scriptService, cacheRecycler, pageCacheRecycler, bigArrays);
scriptService, pageCacheRecycler, bigArrays);
SearchContext.setCurrent(context);
try {

View File

@@ -26,7 +26,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@@ -63,8 +62,6 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@@ -72,12 +69,11 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
@Inject
public TransportExplainAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, IndicesService indicesService,
ScriptService scriptService, CacheRecycler cacheRecycler,
PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters) {
ScriptService scriptService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, ActionFilters actionFilters) {
super(settings, ExplainAction.NAME, threadPool, clusterService, transportService, actionFilters);
this.indicesService = indicesService;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
}
@@ -122,7 +118,7 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
.filteringAliases(request.filteringAlias())
.nowInMillis(request.nowInMillis),
null, result.searcher(), indexService, indexShard,
scriptService, cacheRecycler, pageCacheRecycler,
scriptService, pageCacheRecycler,
bigArrays
);
SearchContext.setCurrent(context);

View File

@@ -1,319 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cache.recycler;
import com.carrotsearch.hppc.*;
import com.google.common.base.Strings;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.recycler.AbstractRecyclerC;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import java.util.Locale;
import static org.elasticsearch.common.recycler.Recyclers.*;
@SuppressWarnings("unchecked")
public class CacheRecycler extends AbstractComponent {
public final Recycler<ObjectObjectOpenHashMap> hashMap;
public final Recycler<ObjectOpenHashSet> hashSet;
public final Recycler<DoubleObjectOpenHashMap> doubleObjectMap;
public final Recycler<LongObjectOpenHashMap> longObjectMap;
public final Recycler<LongLongOpenHashMap> longLongMap;
public final Recycler<IntIntOpenHashMap> intIntMap;
public final Recycler<FloatIntOpenHashMap> floatIntMap;
public final Recycler<DoubleIntOpenHashMap> doubleIntMap;
public final Recycler<LongIntOpenHashMap> longIntMap;
public final Recycler<ObjectIntOpenHashMap> objectIntMap;
public final Recycler<IntObjectOpenHashMap> intObjectMap;
public final Recycler<ObjectFloatOpenHashMap> objectFloatMap;
public void close() {
hashMap.close();
hashSet.close();
doubleObjectMap.close();
longObjectMap.close();
longLongMap.close();
intIntMap.close();
floatIntMap.close();
doubleIntMap.close();
longIntMap.close();
objectIntMap.close();
intObjectMap.close();
objectFloatMap.close();
}
@Inject
public CacheRecycler(Settings settings) {
super(settings);
final Type type = Type.parse(settings.get("type"));
int limit = settings.getAsInt("limit", 10);
int smartSize = settings.getAsInt("smart_size", 1024);
final int availableProcessors = EsExecutors.boundedNumberOfProcessors(settings);
hashMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<ObjectObjectOpenHashMap>() {
@Override
public ObjectObjectOpenHashMap newInstance(int sizing) {
return new ObjectObjectOpenHashMap(size(sizing));
}
@Override
public void recycle(ObjectObjectOpenHashMap value) {
value.clear();
}
});
hashSet = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<ObjectOpenHashSet>() {
@Override
public ObjectOpenHashSet newInstance(int sizing) {
return new ObjectOpenHashSet(size(sizing), 0.5f);
}
@Override
public void recycle(ObjectOpenHashSet value) {
value.clear();
}
});
doubleObjectMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<DoubleObjectOpenHashMap>() {
@Override
public DoubleObjectOpenHashMap newInstance(int sizing) {
return new DoubleObjectOpenHashMap(size(sizing));
}
@Override
public void recycle(DoubleObjectOpenHashMap value) {
value.clear();
}
});
longObjectMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<LongObjectOpenHashMap>() {
@Override
public LongObjectOpenHashMap newInstance(int sizing) {
return new LongObjectOpenHashMap(size(sizing));
}
@Override
public void recycle(LongObjectOpenHashMap value) {
value.clear();
}
});
longLongMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<LongLongOpenHashMap>() {
@Override
public LongLongOpenHashMap newInstance(int sizing) {
return new LongLongOpenHashMap(size(sizing));
}
@Override
public void recycle(LongLongOpenHashMap value) {
value.clear();
}
});
intIntMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<IntIntOpenHashMap>() {
@Override
public IntIntOpenHashMap newInstance(int sizing) {
return new IntIntOpenHashMap(size(sizing));
}
@Override
public void recycle(IntIntOpenHashMap value) {
value.clear();
}
});
floatIntMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<FloatIntOpenHashMap>() {
@Override
public FloatIntOpenHashMap newInstance(int sizing) {
return new FloatIntOpenHashMap(size(sizing));
}
@Override
public void recycle(FloatIntOpenHashMap value) {
value.clear();
}
});
doubleIntMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<DoubleIntOpenHashMap>() {
@Override
public DoubleIntOpenHashMap newInstance(int sizing) {
return new DoubleIntOpenHashMap(size(sizing));
}
@Override
public void recycle(DoubleIntOpenHashMap value) {
value.clear();
}
});
longIntMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<LongIntOpenHashMap>() {
@Override
public LongIntOpenHashMap newInstance(int sizing) {
return new LongIntOpenHashMap(size(sizing));
}
@Override
public void recycle(LongIntOpenHashMap value) {
value.clear();
}
@Override
public void destroy(LongIntOpenHashMap value) {
// drop instance for GC
}
});
objectIntMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<ObjectIntOpenHashMap>() {
@Override
public ObjectIntOpenHashMap newInstance(int sizing) {
return new ObjectIntOpenHashMap(size(sizing));
}
@Override
public void recycle(ObjectIntOpenHashMap value) {
value.clear();
}
});
intObjectMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<IntObjectOpenHashMap>() {
@Override
public IntObjectOpenHashMap newInstance(int sizing) {
return new IntObjectOpenHashMap(size(sizing));
}
@Override
public void recycle(IntObjectOpenHashMap value) {
value.clear();
}
});
objectFloatMap = build(type, limit, smartSize, availableProcessors, new AbstractRecyclerC<ObjectFloatOpenHashMap>() {
@Override
public ObjectFloatOpenHashMap newInstance(int sizing) {
return new ObjectFloatOpenHashMap(size(sizing));
}
@Override
public void recycle(ObjectFloatOpenHashMap value) {
value.clear();
}
});
}
public <K, V> Recycler.V<ObjectObjectOpenHashMap<K, V>> hashMap(int sizing) {
return (Recycler.V) hashMap.obtain(sizing);
}
public <T> Recycler.V<ObjectOpenHashSet<T>> hashSet(int sizing) {
return (Recycler.V) hashSet.obtain(sizing);
}
public <T> Recycler.V<DoubleObjectOpenHashMap<T>> doubleObjectMap(int sizing) {
return (Recycler.V) doubleObjectMap.obtain(sizing);
}
public <T> Recycler.V<LongObjectOpenHashMap<T>> longObjectMap(int sizing) {
return (Recycler.V) longObjectMap.obtain(sizing);
}
public Recycler.V<LongLongOpenHashMap> longLongMap(int sizing) {
return longLongMap.obtain(sizing);
}
public Recycler.V<IntIntOpenHashMap> intIntMap(int sizing) {
return intIntMap.obtain(sizing);
}
public Recycler.V<FloatIntOpenHashMap> floatIntMap(int sizing) {
return floatIntMap.obtain(sizing);
}
public Recycler.V<DoubleIntOpenHashMap> doubleIntMap(int sizing) {
return doubleIntMap.obtain(sizing);
}
public Recycler.V<LongIntOpenHashMap> longIntMap(int sizing) {
return longIntMap.obtain(sizing);
}
public <T> Recycler.V<ObjectIntOpenHashMap<T>> objectIntMap(int sizing) {
return (Recycler.V) objectIntMap.obtain(sizing);
}
public <T> Recycler.V<IntObjectOpenHashMap<T>> intObjectMap(int sizing) {
return (Recycler.V) intObjectMap.obtain(sizing);
}
public <T> Recycler.V<ObjectFloatOpenHashMap<T>> objectFloatMap(int sizing) {
return (Recycler.V) objectFloatMap.obtain(sizing);
}
static int size(int sizing) {
return sizing > 0 ? sizing : 256;
}
private <T> Recycler<T> build(Type type, int limit, int smartSize, int availableProcessors, Recycler.C<T> c) {
Recycler<T> recycler;
try {
recycler = type.build(c, limit, availableProcessors);
if (smartSize > 0) {
recycler = sizing(recycler, none(c), smartSize);
}
} catch (IllegalArgumentException ex) {
throw new ElasticsearchIllegalArgumentException("no type support [" + type + "] for recycler");
}
return recycler;
}
public static enum Type {
QUEUE {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors) {
return concurrentDeque(c, limit);
}
},
SOFT_CONCURRENT {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors) {
return concurrent(softFactory(dequeFactory(c, limit)), availableProcessors);
}
},
CONCURRENT {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors) {
return concurrent(dequeFactory(c, limit), availableProcessors);
}
},
NONE {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors) {
return none(c);
}
};
public static Type parse(String type) {
if (Strings.isNullOrEmpty(type)) {
return SOFT_CONCURRENT;
}
try {
return Type.valueOf(type.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
throw new ElasticsearchIllegalArgumentException("no type support [" + type + "]");
}
}
abstract <T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors);
}
}
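For reference, this is how callers consumed the API deleted above — a minimal
sketch of the obtain/use/release cycle, based on the call sites removed from
TopChildrenQuery later in this commit (empty settings and the sketch class name
are assumptions, not part of the commit):

    import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
    import org.elasticsearch.cache.recycler.CacheRecycler;
    import org.elasticsearch.common.recycler.Recycler;
    import org.elasticsearch.common.settings.ImmutableSettings;

    public class RecyclerUsageSketch {
        public static void main(String[] args) {
            CacheRecycler recycler = new CacheRecycler(ImmutableSettings.settingsBuilder().build());
            // a non-positive sizing falls back to the default of 256 (see size() above)
            Recycler.V<ObjectObjectOpenHashMap<String, Integer>> v = recycler.hashMap(-1);
            try {
                v.v().put("doc", 1); // v() exposes the borrowed map
            } finally {
                v.close(); // recycle(): clears the map and returns it to the pool
            }
            recycler.close();
        }
    }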

View File

@@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cache.recycler;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
/**
*/
public class CacheRecyclerModule extends AbstractModule {
private final Settings settings;
public CacheRecyclerModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
bind(CacheRecycler.class).asEagerSingleton();
}
}

View File

@@ -54,8 +54,6 @@ import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
@@ -177,7 +175,6 @@ public class TransportClient extends AbstractClient {
ModulesBuilder modules = new ModulesBuilder();
modules.add(new Version.Module(version));
modules.add(new CacheRecyclerModule(settings));
modules.add(new PluginsModule(this.settings, pluginsService));
modules.add(new EnvironmentModule(environment));
modules.add(new SettingsModule(this.settings));
@@ -299,7 +296,6 @@ public class TransportClient extends AbstractClient {
// ignore
}
injector.getInstance(CacheRecycler.class).close();
injector.getInstance(PageCacheRecycler.class).close();
CachedStreams.clear();

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
@@ -73,8 +72,6 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
};
final CacheRecycler cacheRecycler;
final AnalysisService analysisService;
final ScriptService scriptService;
@@ -99,14 +96,13 @@ public class IndexQueryParserService extends AbstractIndexComponent {
@Inject
public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings,
IndicesQueriesRegistry indicesQueriesRegistry, CacheRecycler cacheRecycler,
IndicesQueriesRegistry indicesQueriesRegistry,
ScriptService scriptService, AnalysisService analysisService,
MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService, IndexEngine indexEngine,
@Nullable SimilarityService similarityService,
@Nullable Map<String, QueryParserFactory> namedQueryParsers,
@Nullable Map<String, FilterParserFactory> namedFilterParsers) {
super(index, indexSettings);
this.cacheRecycler = cacheRecycler;
this.scriptService = scriptService;
this.analysisService = analysisService;
this.mapperService = mapperService;

View File

@@ -26,7 +26,6 @@ import org.apache.lucene.queryparser.classic.QueryParserSettings;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.search.NoCacheFilter;
@@ -132,10 +131,6 @@ public class QueryParseContext {
return indexQueryParser.analysisService;
}
public CacheRecycler cacheRecycler() {
return indexQueryParser.cacheRecycler;
}
public ScriptService scriptService() {
return indexQueryParser.scriptService;
}

View File

@@ -135,7 +135,7 @@ public class TopChildrenQueryParser implements QueryParser {
// wrap the query with type query
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
TopChildrenQuery query = new TopChildrenQuery(parentChildIndexFieldData, innerQuery, childType, parentType, scoreType, factor, incrementalFactor, parseContext.cacheRecycler(), nonNestedDocsFilter);
TopChildrenQuery query = new TopChildrenQuery(parentChildIndexFieldData, innerQuery, childType, parentType, scoreType, factor, incrementalFactor, nonNestedDocsFilter);
if (queryName != null) {
parseContext.addNamedFilter(queryName, new CustomQueryWrappingFilter(query));
}

View File

@@ -28,11 +28,8 @@ import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.EmptyScorer;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -63,7 +60,6 @@ public class TopChildrenQuery extends Query {
private static final ParentDocComparator PARENT_DOC_COMP = new ParentDocComparator();
private final IndexParentChildFieldData parentChildIndexFieldData;
private final CacheRecycler cacheRecycler;
private final String parentType;
private final String childType;
private final ScoreType scoreType;
@@ -77,7 +73,7 @@ public class TopChildrenQuery extends Query {
private IndexReader rewriteIndexReader;
// Note, the query is expected to already be filtered to only child type docs
public TopChildrenQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String childType, String parentType, ScoreType scoreType, int factor, int incrementalFactor, CacheRecycler cacheRecycler, Filter nonNestedDocsFilter) {
public TopChildrenQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String childType, String parentType, ScoreType scoreType, int factor, int incrementalFactor, Filter nonNestedDocsFilter) {
this.parentChildIndexFieldData = parentChildIndexFieldData;
this.originalChildQuery = childQuery;
this.childType = childType;
@@ -85,7 +81,6 @@ public class TopChildrenQuery extends Query {
this.scoreType = scoreType;
this.factor = factor;
this.incrementalFactor = incrementalFactor;
this.cacheRecycler = cacheRecycler;
this.nonNestedDocsFilter = nonNestedDocsFilter;
}
@@ -123,7 +118,7 @@ public class TopChildrenQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
ObjectObjectOpenHashMap<Object, ParentDoc[]> parentDocs = new ObjectObjectOpenHashMap<>();
SearchContext searchContext = SearchContext.current();
int parentHitsResolved;
@@ -144,7 +139,7 @@ public class TopChildrenQuery extends Query {
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
while (true) {
parentDocs.v().clear();
parentDocs.clear();
TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
try {
parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);
@@ -172,9 +167,9 @@ public class TopChildrenQuery extends Query {
return parentWeight;
}
int resolveParentDocuments(TopDocs topDocs, SearchContext context, Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs) throws Exception {
int resolveParentDocuments(TopDocs topDocs, SearchContext context, ObjectObjectOpenHashMap<Object, ParentDoc[]> parentDocs) throws Exception {
int parentHitsResolved = 0;
Recycler.V<ObjectObjectOpenHashMap<Object, Recycler.V<IntObjectOpenHashMap<ParentDoc>>>> parentDocsPerReader = cacheRecycler.hashMap(context.searcher().getIndexReader().leaves().size());
ObjectObjectOpenHashMap<Object, IntObjectOpenHashMap<ParentDoc>> parentDocsPerReader = new ObjectObjectOpenHashMap<>(context.searcher().getIndexReader().leaves().size());
child_hits: for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
int readerIndex = ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves());
AtomicReaderContext subContext = context.searcher().getIndexReader().leaves().get(readerIndex);
@@ -210,12 +205,12 @@ public class TopChildrenQuery extends Query {
}
if (parentDocId != DocsEnum.NO_MORE_DOCS) {
// we found a match, add it and break
Recycler.V<IntObjectOpenHashMap<ParentDoc>> readerParentDocs = parentDocsPerReader.v().get(indexReader.getCoreCacheKey());
IntObjectOpenHashMap<ParentDoc> readerParentDocs = parentDocsPerReader.get(indexReader.getCoreCacheKey());
if (readerParentDocs == null) {
readerParentDocs = cacheRecycler.intObjectMap(indexReader.maxDoc());
parentDocsPerReader.v().put(indexReader.getCoreCacheKey(), readerParentDocs);
readerParentDocs = new IntObjectOpenHashMap<>(indexReader.maxDoc());
parentDocsPerReader.put(indexReader.getCoreCacheKey(), readerParentDocs);
}
ParentDoc parentDoc = readerParentDocs.v().get(parentDocId);
ParentDoc parentDoc = readerParentDocs.get(parentDocId);
if (parentDoc == null) {
parentHitsResolved++; // we have a hit on a parent
parentDoc = new ParentDoc();
@@ -223,7 +218,7 @@ public class TopChildrenQuery extends Query {
parentDoc.count = 1;
parentDoc.maxScore = scoreDoc.score;
parentDoc.sumScores = scoreDoc.score;
readerParentDocs.v().put(parentDocId, parentDoc);
readerParentDocs.put(parentDocId, parentDoc);
} else {
parentDoc.count++;
parentDoc.sumScores += scoreDoc.score;
@@ -235,19 +230,17 @@ public class TopChildrenQuery extends Query {
}
}
}
boolean[] states = parentDocsPerReader.v().allocated;
Object[] keys = parentDocsPerReader.v().keys;
Object[] values = parentDocsPerReader.v().values;
boolean[] states = parentDocsPerReader.allocated;
Object[] keys = parentDocsPerReader.keys;
Object[] values = parentDocsPerReader.values;
for (int i = 0; i < states.length; i++) {
if (states[i]) {
Recycler.V<IntObjectOpenHashMap<ParentDoc>> value = (Recycler.V<IntObjectOpenHashMap<ParentDoc>>) values[i];
ParentDoc[] _parentDocs = value.v().values().toArray(ParentDoc.class);
IntObjectOpenHashMap<ParentDoc> value = (IntObjectOpenHashMap<ParentDoc>) values[i];
ParentDoc[] _parentDocs = value.values().toArray(ParentDoc.class);
Arrays.sort(_parentDocs, PARENT_DOC_COMP);
parentDocs.v().put(keys[i], _parentDocs);
Releasables.close(value);
parentDocs.put(keys[i], _parentDocs);
}
}
Releasables.close(parentDocsPerReader);
return parentHitsResolved;
}
@@ -295,9 +288,9 @@ public class TopChildrenQuery extends Query {
private class ParentWeight extends Weight implements Releasable {
private final Weight queryWeight;
private final Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs;
private final ObjectObjectOpenHashMap<Object, ParentDoc[]> parentDocs;
public ParentWeight(Weight queryWeight, Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs) throws IOException {
public ParentWeight(Weight queryWeight, ObjectObjectOpenHashMap<Object, ParentDoc[]> parentDocs) throws IOException {
this.queryWeight = queryWeight;
this.parentDocs = parentDocs;
}
@@ -320,12 +313,11 @@ public class TopChildrenQuery extends Query {
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentDocs);
}
@Override
public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
ParentDoc[] readerParentDocs = parentDocs.v().get(context.reader().getCoreCacheKey());
ParentDoc[] readerParentDocs = parentDocs.get(context.reader().getCoreCacheKey());
if (readerParentDocs != null) {
if (scoreType == ScoreType.MAX) {
return new ParentScorer(this, readerParentDocs) {

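The ParentWeight hunk above is the clearest view of the lifecycle
simplification: the weight used to own a pooled value and return it to the pool
on close, whereas the rewritten version holds an ordinary map that is simply
garbage-collected. A compilable sketch of the two shapes (class names
hypothetical, element type simplified to Object[]):

    import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
    import org.elasticsearch.common.lease.Releasable;
    import org.elasticsearch.common.lease.Releasables;
    import org.elasticsearch.common.recycler.Recycler;

    // before: the owner implements Releasable and must release the pooled value
    class PooledParentDocs implements Releasable {
        final Recycler.V<ObjectObjectOpenHashMap<Object, Object[]>> parentDocs;
        PooledParentDocs(Recycler.V<ObjectObjectOpenHashMap<Object, Object[]>> parentDocs) {
            this.parentDocs = parentDocs;
        }
        @Override
        public void close() {
            Releasables.close(parentDocs); // clears the map and hands it back to the pool
        }
    }

    // after: an ordinary map; there is nothing to close
    class PlainParentDocs {
        final ObjectObjectOpenHashMap<Object, Object[]> parentDocs;
        PlainParentDocs(ObjectObjectOpenHashMap<Object, Object[]> parentDocs) {
            this.parentDocs = parentDocs;
        }
    }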
View File

@@ -26,8 +26,6 @@ import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.bench.BenchmarkModule;
import org.elasticsearch.bulk.udp.BulkUdpModule;
import org.elasticsearch.bulk.udp.BulkUdpService;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecyclerModule;
import org.elasticsearch.client.Client;
@@ -158,7 +156,6 @@ public final class InternalNode implements Node {
try {
ModulesBuilder modules = new ModulesBuilder();
modules.add(new Version.Module(version));
modules.add(new CacheRecyclerModule(settings));
modules.add(new PageCacheRecyclerModule(settings));
modules.add(new CircuitBreakerModule(settings));
modules.add(new BigArraysModule(settings));
@@ -394,7 +391,6 @@ public final class InternalNode implements Node {
}
injector.getInstance(NodeEnvironment.class).close();
injector.getInstance(CacheRecycler.class).close();
injector.getInstance(PageCacheRecycler.class).close();
Injectors.close(injector);

View File

@@ -26,7 +26,6 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.text.StringText;
@@ -86,7 +85,6 @@ public class PercolateContext extends SearchContext {
private final IndexService indexService;
private final IndexFieldDataService fieldDataService;
private final IndexShard indexShard;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
private final ScriptService scriptService;
@@ -110,7 +108,7 @@ public class PercolateContext extends SearchContext {
private Sort sort;
public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard,
IndexService indexService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler,
IndexService indexService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, ScriptService scriptService) {
this.indexShard = indexShard;
this.indexService = indexService;
@@ -118,7 +116,6 @@ public class PercolateContext extends SearchContext {
this.searchShardTarget = searchShardTarget;
this.percolateQueries = indexShard.percolateRegistry().percolateQueries();
this.types = new String[]{request.documentType()};
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays.withCircuitBreaking();
this.querySearchResult = new QuerySearchResult(0, searchShardTarget);
@@ -437,11 +434,6 @@ public class PercolateContext extends SearchContext {
return scriptService;
}
@Override
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
@Override
public PageCacheRecycler pageCacheRecycler() {
return pageCacheRecycler;

View File

@@ -33,7 +33,6 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.percolate.PercolateShardResponse;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
@@ -104,7 +103,6 @@ public class PercolatorService extends AbstractComponent {
private final IndicesService indicesService;
private final ByteObjectOpenHashMap<PercolatorType> percolatorTypes;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
private final ClusterService clusterService;
@@ -121,14 +119,13 @@ public class PercolatorService extends AbstractComponent {
private final CloseableThreadLocal<MemoryIndex> cache;
@Inject
public PercolatorService(Settings settings, IndicesService indicesService, CacheRecycler cacheRecycler,
public PercolatorService(Settings settings, IndicesService indicesService,
PageCacheRecycler pageCacheRecycler, BigArrays bigArrays,
HighlightPhase highlightPhase, ClusterService clusterService,
AggregationPhase aggregationPhase, ScriptService scriptService,
MappingUpdatedAction mappingUpdatedAction) {
super(settings);
this.indicesService = indicesService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
this.clusterService = clusterService;
@@ -174,7 +171,7 @@ public class PercolatorService extends AbstractComponent {
SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id());
final PercolateContext context = new PercolateContext(
request, searchShardTarget, indexShard, percolateIndexService, cacheRecycler, pageCacheRecycler, bigArrays, scriptService
request, searchShardTarget, indexShard, percolateIndexService, pageCacheRecycler, bigArrays, scriptService
);
try {
ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context);

View File

@@ -30,7 +30,6 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -70,11 +69,8 @@ import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.*;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.*;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.*;
import org.elasticsearch.search.warmer.IndexWarmersMetaData;
import org.elasticsearch.threadpool.ThreadPool;
@@ -115,8 +111,6 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@@ -141,7 +135,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
@Inject
public SearchService(Settings settings, ClusterService clusterService, IndicesService indicesService, IndicesLifecycle indicesLifecycle, IndicesWarmer indicesWarmer, ThreadPool threadPool,
ScriptService scriptService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase,
ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase,
IndicesQueryCache indicesQueryCache) {
super(settings);
this.threadPool = threadPool;
@@ -149,7 +143,6 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
this.indicesService = indicesService;
this.indicesWarmer = indicesWarmer;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
this.dfsPhase = dfsPhase;
@@ -525,7 +518,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, cacheRecycler, pageCacheRecycler, bigArrays);
SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays);
SearchContext.setCurrent(context);
try {
context.scroll(request.scroll());

View File

@@ -23,10 +23,8 @@ import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.ReaderContextAware;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
@@ -47,7 +45,6 @@ public class ReverseNestedAggregator extends SingleBucketAggregator implements R
private DocIdSetIterator parentDocs;
// TODO: Add LongIntPagedHashMap?
private final Recycler.V<LongIntOpenHashMap> bucketOrdToLastCollectedParentDocRecycler;
private final LongIntOpenHashMap bucketOrdToLastCollectedParentDoc;
public ReverseNestedAggregator(String name, AggregatorFactories factories, String nestedPath, AggregationContext aggregationContext, Aggregator parent) {
@@ -74,8 +71,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator implements R
}
parentFilter = SearchContext.current().filterCache().cache(objectMapper.nestedTypeFilter());
}
bucketOrdToLastCollectedParentDocRecycler = aggregationContext.searchContext().cacheRecycler().longIntMap(32);
bucketOrdToLastCollectedParentDoc = bucketOrdToLastCollectedParentDocRecycler.v();
bucketOrdToLastCollectedParentDoc = new LongIntOpenHashMap(32);
aggregationContext.ensureScoreDocsInOrder();
}
@@ -149,11 +145,6 @@ public class ReverseNestedAggregator extends SingleBucketAggregator implements R
return parentFilter;
}
@Override
protected void doClose() {
Releasables.close(bucketOrdToLastCollectedParentDocRecycler);
}
public static class Factory extends AggregatorFactory {
private final String path;

View File

@@ -25,7 +25,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
@@ -316,11 +315,6 @@ public class TopHitsContext extends SearchContext {
return context.scriptService();
}
@Override
public CacheRecycler cacheRecycler() {
return context.cacheRecycler();
}
@Override
public PageCacheRecycler pageCacheRecycler() {
return context.pageCacheRecycler();

View File

@@ -24,7 +24,6 @@ import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
@@ -73,7 +72,7 @@ public class SearchPhaseController extends AbstractComponent {
private ScriptService scriptService;
@Inject
public SearchPhaseController(Settings settings, CacheRecycler cacheRecycler, BigArrays bigArrays, ScriptService scriptService) {
public SearchPhaseController(Settings settings, BigArrays bigArrays, ScriptService scriptService) {
super(settings);
this.bigArrays = bigArrays;
this.scriptService = scriptService;

View File

@@ -27,7 +27,6 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasables;
@@ -90,8 +89,6 @@ public class DefaultSearchContext extends SearchContext {
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
private final BigArrays bigArrays;
@@ -180,7 +177,7 @@ public class DefaultSearchContext extends SearchContext {
public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
ScriptService scriptService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler,
ScriptService scriptService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays) {
this.id = id;
this.request = request;
@@ -188,7 +185,6 @@ public class DefaultSearchContext extends SearchContext {
this.shardTarget = shardTarget;
this.engineSearcher = engineSearcher;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
// SearchContexts use a BigArrays that can circuit break
this.bigArrays = bigArrays.withCircuitBreaking();
@@ -431,10 +427,6 @@ public class DefaultSearchContext extends SearchContext {
return scriptService;
}
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
public PageCacheRecycler pageCacheRecycler() {
return pageCacheRecycler;
}

View File

@@ -26,7 +26,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
@@ -203,8 +202,6 @@ public abstract class SearchContext implements Releasable {
public abstract ScriptService scriptService();
public abstract CacheRecycler cacheRecycler();
public abstract PageCacheRecycler pageCacheRecycler();
public abstract BigArrays bigArrays();

View File

@@ -20,7 +20,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
@@ -73,7 +72,6 @@ public class TemplateQueryParserTest extends ElasticsearchTestCase {
injector = new ModulesBuilder().add(
new EnvironmentModule(new Environment(settings)),
new SettingsModule(settings),
new CacheRecyclerModule(settings),
new CodecModule(settings),
new ThreadPoolModule(settings),
new IndicesQueriesModule(),

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.query.plugin;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
@@ -66,7 +65,6 @@ public class IndexQueryParserPlugin2Tests extends ElasticsearchTestCase {
Index index = new Index("test");
Injector injector = new ModulesBuilder().add(
new CodecModule(settings),
new CacheRecyclerModule(settings),
new SettingsModule(settings),
new ThreadPoolModule(settings),
new IndicesQueriesModule(),

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.query.plugin;
import org.elasticsearch.cache.recycler.CacheRecyclerModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
@@ -75,7 +74,6 @@ public class IndexQueryParserPluginTests extends ElasticsearchTestCase {
Index index = new Index("test");
Injector injector = new ModulesBuilder().add(
new SettingsModule(settings),
new CacheRecyclerModule(settings),
new ThreadPoolModule(settings),
new IndicesQueriesModule(),
new ScriptModule(settings),

View File

@@ -57,7 +57,7 @@ public class TopChildrenQueryTests extends AbstractChildTests {
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
Query query = new TopChildrenQuery(parentChildIndexFieldData, childQuery, "child", "parent", scoreType, 1, 1, SearchContext.current().cacheRecycler(), NonNestedDocsFilter.INSTANCE);
Query query = new TopChildrenQuery(parentChildIndexFieldData, childQuery, "child", "parent", scoreType, 1, 1, NonNestedDocsFilter.INSTANCE);
QueryUtils.check(query);
}

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.test;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
@@ -179,9 +178,8 @@ public abstract class ElasticsearchSingleNodeTest extends ElasticsearchTestCase
protected static SearchContext createSearchContext(IndexService indexService) {
BigArrays bigArrays = indexService.injector().getInstance(BigArrays.class);
ThreadPool threadPool = indexService.injector().getInstance(ThreadPool.class);
CacheRecycler cacheRecycler = indexService.injector().getInstance(CacheRecycler.class);
PageCacheRecycler pageCacheRecycler = indexService.injector().getInstance(PageCacheRecycler.class);
return new TestSearchContext(threadPool, cacheRecycler, pageCacheRecycler, bigArrays, indexService, indexService.cache().filter(), indexService.fieldData());
return new TestSearchContext(threadPool, pageCacheRecycler, bigArrays, indexService, indexService.cache().filter(), indexService.fieldData());
}
}

View File

@@ -34,7 +34,7 @@ import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecyclerModule;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
@@ -334,9 +334,8 @@ public final class InternalTestCluster extends TestCluster {
} else {
builder.put(Transport.TransportSettings.TRANSPORT_TCP_COMPRESS, rarely(random));
}
builder.put("type", RandomPicks.randomFrom(random, CacheRecycler.Type.values()));
if (random.nextBoolean()) {
builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, CacheRecycler.Type.values()));
builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values()));
}
if (random.nextInt(10) == 0) { // 10% of the nodes have a very frequent check interval
builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(10 + random.nextInt(2000)));

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
@@ -65,7 +64,6 @@ import java.util.List;
public class TestSearchContext extends SearchContext {
final CacheRecycler cacheRecycler;
final PageCacheRecycler pageCacheRecycler;
final BigArrays bigArrays;
final IndexService indexService;
@@ -78,8 +76,7 @@ public class TestSearchContext extends SearchContext {
private int terminateAfter = DEFAULT_TERMINATE_AFTER;
private String[] types;
public TestSearchContext(ThreadPool threadPool, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService indexService, FilterCache filterCache, IndexFieldDataService indexFieldDataService) {
this.cacheRecycler = cacheRecycler;
public TestSearchContext(ThreadPool threadPool, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService indexService, FilterCache filterCache, IndexFieldDataService indexFieldDataService) {
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays.withCircuitBreaking();
this.indexService = indexService;
@@ -89,7 +86,6 @@ public class TestSearchContext extends SearchContext {
}
public TestSearchContext() {
this.cacheRecycler = null;
this.pageCacheRecycler = null;
this.bigArrays = null;
this.indexService = null;
@@ -310,11 +306,6 @@ public class TestSearchContext extends SearchContext {
return null;
}
@Override
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
@Override
public PageCacheRecycler pageCacheRecycler() {
return pageCacheRecycler;