Improved SearchContext.addReleasable.

For resources whose lifetime is effectively defined by the search context they
are attached to, it is convenient to use the search context to schedule the
release of such resources.

This commit changes aggregations to use this mechanism and also introduces a
`Lifetime` enum that defines how long a registered object should live:
 - COLLECTION: the object only needs to live during collection time; this is
   what SearchContext.addReleasable would have assumed before this change
   (used for p/c queries),
 - PHASE: the resource only needs to live until the end of the current search
   phase (DFS, QUERY or FETCH),
 - CONTEXT: the resource needs to live until the search context it is attached
   to is destroyed.

Aggregators are currently registered with the CONTEXT lifetime because, with
the DFS_QUERY_THEN_FETCH search type, they are allocated during the DFS phase
but only used during the QUERY phase. This should eventually be fixed so that
they are allocated during the QUERY phase only, at which point PHASE can be
used as their lifetime.
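
As an illustration, here is a minimal sketch of the resulting usage pattern
(the variable names below are illustrative, not taken from a single call site):

    SearchContext context = SearchContext.current();

    // Released when the current collection finishes; ContextIndexSearcher
    // calls clearReleasables(Lifetime.COLLECTION) after each search/explain:
    context.addReleasable(parentWeight, Lifetime.COLLECTION);

    // Released at the end of the current phase; SearchService.cleanContext()
    // calls clearReleasables(Lifetime.PHASE), which also releases anything
    // still registered with the shorter COLLECTION lifetime:
    context.addReleasable(docIdSetCollector, Lifetime.PHASE);

    // Released only when the context is destroyed; SearchContext.close()
    // clears the CONTEXT lifetime (and everything shorter) before doClose():
    context.addReleasable(aggregator, Lifetime.CONTEXT);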

Close #5703
Author: Adrien Grand
Date:   2014-04-11 11:18:49 +02:00
Commit: e458d4fd93 (parent e589301806)

24 changed files with 156 additions and 250 deletions

TransportShardDeleteByQueryAction.java

@@ -124,10 +124,10 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
             SearchContext.current().parsedQuery(new ParsedQuery(deleteByQuery.query(), ImmutableMap.<String, Filter>of()));
             indexShard.deleteByQuery(deleteByQuery);
         } finally {
-            SearchContext searchContext = SearchContext.current();
-            searchContext.clearAndRelease();
+            try (SearchContext searchContext = SearchContext.current()) {
             SearchContext.removeCurrent();
+            }
         }
         return new PrimaryResponse<>(shardRequest.request, new ShardDeleteByQueryResponse(), null);
     }

@@ -147,11 +147,11 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
             SearchContext.current().parsedQuery(new ParsedQuery(deleteByQuery.query(), ImmutableMap.<String, Filter>of()));
             indexShard.deleteByQuery(deleteByQuery);
         } finally {
-            SearchContext searchContext = SearchContext.current();
-            searchContext.clearAndRelease();
+            try (SearchContext searchContext = SearchContext.current();) {
             SearchContext.removeCurrent();
+            }
         }
     }

     @Override
     protected ShardIterator shards(ClusterState clusterState, ShardDeleteByQueryRequest request) {

ChildrenConstantScoreQuery.java

@@ -41,6 +41,7 @@ import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.Set;

@@ -123,7 +124,7 @@ public class ChildrenConstantScoreQuery extends Query {
                 shortCircuitFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
             }
             final ParentWeight parentWeight = new ParentWeight(parentFilter, shortCircuitFilter, parentIds);
-            searchContext.addReleasable(parentWeight);
+            searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
             releaseParentIds = false;
             return parentWeight;
         } finally {

ChildrenQuery.java

@@ -41,6 +41,7 @@ import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.Arrays;

@@ -219,7 +220,7 @@ public class ChildrenQuery extends Query {
             parentFilter = new ApplyAcceptedDocsFilter(this.parentFilter);
         }
         ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, size, parentIds, scores, occurrences);
-        searchContext.addReleasable(parentWeight);
+        searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
         return parentWeight;
     }

CustomQueryWrappingFilter.java

@@ -27,6 +27,7 @@ import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.search.NoCacheFilter;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.IdentityHashMap;

@@ -66,7 +67,7 @@ public class CustomQueryWrappingFilter extends NoCacheFilter implements Releasab
             IndexSearcher searcher = searchContext.searcher();
             docIdSets = new IdentityHashMap<>();
             this.searcher = searcher;
-            searchContext.addReleasable(this);
+            searchContext.addReleasable(this, Lifetime.COLLECTION);

             final Weight weight = searcher.createNormalizedWeight(query);
             for (final AtomicReaderContext leaf : searcher.getTopReaderContext().leaves()) {

ParentConstantScoreQuery.java

@@ -36,6 +36,7 @@ import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.Set;

@@ -104,7 +105,7 @@ public class ParentConstantScoreQuery extends Query {
             }
             final ChildrenWeight childrenWeight = new ChildrenWeight(childrenFilter, parentIds);
-            searchContext.addReleasable(childrenWeight);
+            searchContext.addReleasable(childrenWeight, Lifetime.COLLECTION);
             releaseParentIds = false;
             return childrenWeight;
         } finally {

ParentQuery.java

@@ -40,6 +40,7 @@ import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.Set;

@@ -156,7 +157,7 @@ public class ParentQuery extends Query {
                 Releasables.close(collector.parentIds, collector.scores);
             }
         }
-        searchContext.addReleasable(childWeight);
+        searchContext.addReleasable(childWeight, Lifetime.COLLECTION);
         return childWeight;
     }

TopChildrenQuery.java

@@ -38,6 +38,7 @@ import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.Arrays;

@@ -168,7 +169,7 @@ public class TopChildrenQuery extends Query {
         }
         ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
-        searchContext.addReleasable(parentWeight);
+        searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
         return parentWeight;
     }

PercolateContext.java

@@ -23,7 +23,6 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.*;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.CacheRecycler;

@@ -208,7 +207,7 @@ public class PercolateContext extends SearchContext {
     }

     @Override
-    public void close() throws ElasticsearchException {
+    protected void doClose() {
         try (Releasable releasable = Releasables.wrap(engineSearcher, docSearcher)) {
             if (docSearcher != null) {
                 IndexReader indexReader = docSearcher.reader();

@@ -291,11 +290,6 @@
     }

     // Unused:
-    @Override
-    public void clearAndRelease() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public void preProcess() {
         throw new UnsupportedOperationException();

@@ -675,16 +669,6 @@
         throw new UnsupportedOperationException();
     }

-    @Override
-    public void addReleasable(Releasable releasable) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void clearReleasables() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public ScanContext scanContext() {
         throw new UnsupportedOperationException();

SearchService.java

@@ -69,10 +69,8 @@ import org.elasticsearch.search.dfs.CachedDfSource;
 import org.elasticsearch.search.dfs.DfsPhase;
 import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.fetch.*;
-import org.elasticsearch.search.internal.DefaultSearchContext;
-import org.elasticsearch.search.internal.InternalScrollSearchRequest;
-import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.ShardSearchRequest;
+import org.elasticsearch.search.internal.*;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;
 import org.elasticsearch.search.query.*;
 import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import org.elasticsearch.threadpool.ThreadPool;

@@ -575,6 +573,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
     }

     private void cleanContext(SearchContext context) {
+        assert context == SearchContext.current();
+        context.clearReleasables(Lifetime.PHASE);
         SearchContext.removeCurrent();
     }

AggregationPhase.java

@@ -107,7 +107,6 @@ public class AggregationPhase implements SearchPhase {
         }

         Aggregator[] aggregators = context.aggregations().aggregators();
-        try (Releasable releasable = Releasables.wrap(aggregators)) {
         List<Aggregator> globals = new ArrayList<>();
         for (int i = 0; i < aggregators.length; i++) {
             if (aggregators[i] instanceof GlobalAggregator) {

@@ -138,8 +137,6 @@
             context.queryResult().aggregations(new InternalAggregations(aggregations));
         }
-        }
-
     public static class AggregationsCollector extends XCollector {

Aggregator.java

@@ -19,12 +19,12 @@
 package org.elasticsearch.search.aggregations;

 import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.ReaderContextAware;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.ArrayList;

@@ -84,6 +84,9 @@ public abstract class Aggregator implements Releasable, ReaderContextAware {
         assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggragatorFactories.EMPTY instead";
         this.factories = factories;
         this.subAggregators = factories.createSubAggregators(this, estimatedBucketsCount);
+        // TODO: change it to SEARCH_PHASE, but this would imply allocating the aggregators in the QUERY
+        // phase instead of DFS like it is done today
+        context.searchContext().addReleasable(this, Lifetime.CONTEXT);
     }

     /**

@@ -175,10 +178,8 @@
     /** Called upon release of the aggregator. */
     @Override
     public void close() {
-        try (Releasable releasable = Releasables.wrap(subAggregators)) {
             doClose();
         }
-    }

     /** Release instance-specific data. */
     protected void doClose() {}

AggregatorFactories.java

@@ -18,8 +18,6 @@
  */
 package org.elasticsearch.search.aggregations;

-import com.google.common.collect.Iterables;
-import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.ObjectArray;

@@ -28,8 +26,6 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;

 /**

@@ -82,9 +78,6 @@ public class AggregatorFactories {
                 long arraySize = estimatedBucketsCount > 0 ? estimatedBucketsCount : 1;
                 aggregators = bigArrays.newObjectArray(arraySize);
                 aggregators.set(0, first);
-                for (long i = 1; i < arraySize; ++i) {
-                    aggregators.set(i, createAndRegisterContextAware(parent.context(), factory, parent, estimatedBucketsCount));
-                }
             }

@@ -135,29 +128,7 @@
             @Override
             public void doClose() {
-                final Iterable<Aggregator> aggregatorsIter = new Iterable<Aggregator>() {
-                    @Override
-                    public Iterator<Aggregator> iterator() {
-                        return new UnmodifiableIterator<Aggregator>() {
-
-                            long i = 0;
-
-                            @Override
-                            public boolean hasNext() {
-                                return i < aggregators.size();
-                            }
-
-                            @Override
-                            public Aggregator next() {
-                                return aggregators.get(i++);
-                            }
-                        };
-                    }
-                };
-                Releasables.close(Iterables.concat(aggregatorsIter, Collections.singleton(aggregators)));
+                Releasables.close(aggregators);
             }
         };
     }

MatchedQueriesFetchSubPhase.java

@@ -30,6 +30,7 @@ import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.List;

@@ -97,7 +98,7 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
             } catch (IOException e) {
                 // ignore
             } finally {
-                SearchContext.current().clearReleasables();
+                SearchContext.current().clearReleasables(Lifetime.COLLECTION);
             }
         }
     }

ContextIndexSearcher.java

@@ -21,6 +21,8 @@ package org.elasticsearch.search.internal;

 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.*;
+import org.elasticsearch.common.lease.Releasable;
+import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.MinimumScoreCollector;
 import org.elasticsearch.common.lucene.MultiCollector;
 import org.elasticsearch.common.lucene.search.FilteredCollector;

@@ -28,6 +30,7 @@ import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.search.dfs.CachedDfSource;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
 import java.util.ArrayList;

@@ -36,7 +39,7 @@ import java.util.List;
 /**
  * Context-aware extension of {@link IndexSearcher}.
  */
-public class ContextIndexSearcher extends IndexSearcher {
+public class ContextIndexSearcher extends IndexSearcher implements Releasable {

     public static enum Stage {
         NA,

@@ -66,10 +69,9 @@
         setSimilarity(searcher.searcher().getSimilarity());
     }

-    public void release() {
-        if (mainDocIdSetCollector != null) {
-            mainDocIdSetCollector.release();
-        }
+    @Override
+    public void close() {
+        Releasables.close(mainDocIdSetCollector);
     }

     public void dfSource(CachedDfSource dfSource) {

@@ -129,7 +131,7 @@
             }
             return in.createNormalizedWeight(query);
         } catch (Throwable t) {
-            searchContext.clearReleasables();
+            searchContext.clearReleasables(Lifetime.COLLECTION);
             throw new RuntimeException(t);
         }
     }

@@ -187,7 +189,7 @@
                 }
             }
         } finally {
-            searchContext.clearReleasables();
+            searchContext.clearReleasables(Lifetime.COLLECTION);
         }
     }

@@ -200,7 +202,7 @@
             XFilteredQuery filteredQuery = new XFilteredQuery(query, searchContext.aliasFilter());
             return super.explain(filteredQuery, doc);
         } finally {
-            searchContext.clearReleasables();
+            searchContext.clearReleasables(Lifetime.COLLECTION);
         }
     }
 }

DefaultSearchContext.java

@@ -30,7 +30,6 @@ import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.search.AndFilter;
 import org.elasticsearch.common.lucene.search.Queries;

@@ -177,8 +176,6 @@ public class DefaultSearchContext extends SearchContext {

     private volatile long lastAccessTime = -1;

-    private List<Releasable> clearables = null;
-
     private volatile boolean useSlowScroll;

     public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,

@@ -207,18 +204,12 @@
     }

     @Override
-    public void close() throws ElasticsearchException {
+    public void doClose() throws ElasticsearchException {
         if (scanContext != null) {
             scanContext.clear();
         }
         // clear and scope phase we have
-        searcher.release();
-        engineSearcher.close();
-    }
-
-    public void clearAndRelease() {
-        clearReleasables();
-        close();
+        Releasables.close(searcher, engineSearcher);
     }

     /**

@@ -677,25 +668,6 @@
         return fetchResult;
     }

-    @Override
-    public void addReleasable(Releasable releasable) {
-        if (clearables == null) {
-            clearables = new ArrayList<>();
-        }
-        clearables.add(releasable);
-    }
-
-    @Override
-    public void clearReleasables() {
-        if (clearables != null) {
-            try {
-                Releasables.close(clearables);
-            } finally {
-                clearables.clear();
-            }
-        }
-    }
-
     public ScanContext scanContext() {
         if (scanContext == null) {
             scanContext = new ScanContext();

DocIdSetCollector.java

@@ -23,6 +23,7 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
 import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.index.cache.docset.DocSetCache;

@@ -33,7 +34,7 @@

 /**
  */
-public class DocIdSetCollector extends XCollector {
+public class DocIdSetCollector extends XCollector implements Releasable {

     private final DocSetCache docSetCache;
     private final Collector collector;

@@ -53,7 +54,7 @@
         return docSets;
     }

-    public void release() {
+    public void close() {
         for (ContextDocIdSet docSet : docSets) {
             docSetCache.release(docSet);
         }

SearchContext.java

@@ -18,6 +18,9 @@
  */
 package org.elasticsearch.search.internal;

+import com.google.common.collect.Iterables;
+import com.google.common.collect.Multimap;
+import com.google.common.collect.MultimapBuilder;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;

@@ -27,6 +30,7 @@ import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.lease.Releasable;
+import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.cache.docset.DocSetCache;

@@ -59,6 +63,8 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
 import org.elasticsearch.search.scan.ScanContext;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;

+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;

 /**

@@ -81,7 +87,17 @@ public abstract class SearchContext implements Releasable {
         return current.get();
     }

-    public abstract void clearAndRelease();
+    private Multimap<Lifetime, Releasable> clearables = null;
+
+    public final void close() {
+        try {
+            clearReleasables(Lifetime.CONTEXT);
+        } finally {
+            doClose();
+        }
+    }
+
+    protected abstract void doClose();

     /**
      * Should be called before executing the main query and after all other parameters have been set.

@@ -288,9 +304,29 @@

     public abstract FetchSearchResult fetchResult();

-    public abstract void addReleasable(Releasable releasable);
+    /**
+     * Schedule the release of a resource. The time when {@link Releasable#release()} will be called on this object
+     * is function of the provided {@link Lifetime}.
+     */
+    public void addReleasable(Releasable releasable, Lifetime lifetime) {
+        if (clearables == null) {
+            clearables = MultimapBuilder.enumKeys(Lifetime.class).arrayListValues().build();
+        }
+        clearables.put(lifetime, releasable);
+    }

-    public abstract void clearReleasables();
+    public void clearReleasables(Lifetime lifetime) {
+        if (clearables != null) {
+            List<Collection<Releasable>> releasables = new ArrayList<>();
+            for (Lifetime lc : Lifetime.values()) {
+                if (lc.compareTo(lifetime) > 0) {
+                    break;
+                }
+                releasables.add(clearables.removeAll(lc));
+            }
+            Releasables.close(Iterables.concat(releasables));
+        }
+    }

     public abstract ScanContext scanContext();

@@ -305,4 +341,22 @@
     public abstract boolean useSlowScroll();

     public abstract SearchContext useSlowScroll(boolean useSlowScroll);
+
+    /**
+     * The life time of an object that is used during search execution.
+     */
+    public enum Lifetime {
+        /**
+         * This life time is for objects that only live during collection time.
+         */
+        COLLECTION,
+        /**
+         * This life time is for objects that need to live until the end of the current search phase.
+         */
+        PHASE,
+        /**
+         * This life time is for objects that need to live until the search context they are attached to is destroyed.
+         */
+        CONTEXT;
+    }
 }
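
Note that clearReleasables relies on the declaration order of the Lifetime
constants: clearing a given lifetime also releases everything registered with
a shorter one, since the loop walks Lifetime.values() in order and stops only
once it passes the requested value. A small sketch of the resulting behavior
(the resources a, b and c are hypothetical):

    context.addReleasable(a, Lifetime.COLLECTION);
    context.addReleasable(b, Lifetime.PHASE);
    context.addReleasable(c, Lifetime.CONTEXT);

    context.clearReleasables(Lifetime.PHASE); // releases a and b, keeps c
    context.close(); // clears Lifetime.CONTEXT (releasing c), then calls doClose()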

TestSearchContext.java

@@ -26,7 +26,6 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.cache.docset.DocSetCache;

@@ -94,11 +93,6 @@ public class TestSearchContext extends SearchContext {
         this.indexFieldDataService = null;
     }

-    @Override
-    public void clearAndRelease() {
-        // no-op
-    }
-
     @Override
     public void preProcess() {
     }

@@ -556,14 +550,6 @@
         return null;
     }

-    @Override
-    public void addReleasable(Releasable releasable) {
-    }
-
-    @Override
-    public void clearReleasables() {
-    }
-
     @Override
     public ScanContext scanContext() {
         return null;

@@ -590,7 +576,7 @@
     }

     @Override
-    public void close() throws ElasticsearchException {
+    public void doClose() throws ElasticsearchException {
         // no-op
     }

DoubleTermsTests.java

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.search.aggregations.bucket;

-import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;

@@ -33,7 +32,6 @@ import org.elasticsearch.search.aggregations.metrics.stats.Stats;
 import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.hamcrest.Matchers;
 import org.junit.Test;

@@ -217,7 +215,6 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_WithSubAggregation() throws Exception {
         SearchResponse response = client().prepareSearch("idx").setTypes("type")

@@ -433,7 +430,6 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void script_SingleValue() throws Exception {
         SearchResponse response = client().prepareSearch("idx").setTypes("type")

@@ -615,7 +611,6 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void emptyAggregation() throws Exception {
         SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")

@@ -756,11 +751,8 @@
         assertThat(max.getValue(), equalTo(asc ? 4.0 : 2.0));
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMissingSubAggregation() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -776,11 +768,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -797,11 +786,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMultiValuedSubAggregation_WithUknownMetric() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -819,11 +805,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMultiValuedSubAggregation_WithoutMetric() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -841,7 +824,6 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedBySingleValueSubAggregationDesc() throws Exception {
         boolean asc = false;

LongTermsTests.java

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.search.aggregations.bucket;

-import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;

@@ -32,7 +31,6 @@ import org.elasticsearch.search.aggregations.metrics.stats.Stats;
 import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.hamcrest.Matchers;
 import org.junit.Test;

@@ -513,7 +511,6 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void script_MultiValued_WithAggregatorInherited_NoExplicitType() throws Exception {

@@ -753,11 +750,8 @@
         assertThat(max.getValue(), equalTo(asc ? 4.0 : 2.0));
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMissingSubAggregation() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -773,11 +767,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -794,11 +785,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMultiValuedSubAggregation_WithUknownMetric() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -816,11 +804,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMultiValuedSubAggregation_WithoutMetric() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

NestedTests.java

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.search.aggregations.bucket;

-import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;

@@ -33,7 +32,6 @@ import org.elasticsearch.search.aggregations.metrics.max.Max;
 import org.elasticsearch.search.aggregations.metrics.stats.Stats;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.hamcrest.Matchers;
 import org.junit.Test;

@@ -186,10 +184,8 @@
         assertThat(stats.getAvg(), equalTo((double) sum / count));
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void onNonNestedField() throws Exception {
-        MockBigArrays.discardNextCheck();
         try {
             client().prepareSearch("idx")
                     .addAggregation(nested("nested").path("value")

StringTermsTests.java

@@ -19,7 +19,6 @@
 package org.elasticsearch.search.aggregations.bucket;

 import com.google.common.base.Strings;
-import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;

@@ -27,12 +26,12 @@ import org.elasticsearch.index.query.FilterBuilders;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
 import org.elasticsearch.search.aggregations.metrics.avg.Avg;
 import org.elasticsearch.search.aggregations.metrics.stats.Stats;
 import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
 import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.hamcrest.Matchers;
 import org.junit.Test;

@@ -45,7 +44,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.FilterBuilders.termFilter;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
-import static org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;

@@ -943,11 +941,8 @@
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMissingSubAggregation() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -964,11 +959,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

@@ -986,11 +978,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMultiValuedSubAggregation_WithUknownMetric() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")
                     .addAggregation(terms("terms")

@@ -1008,11 +997,8 @@
         }
     }

-    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/5703")
     @Test
     public void singleValuedField_OrderedByMultiValuedSubAggregation_WithoutMetric() throws Exception {
-        MockBigArrays.discardNextCheck();
-
         try {
             client().prepareSearch("idx").setTypes("type")

MockBigArrays.java

@@ -45,26 +45,11 @@ public class MockBigArrays extends BigArrays {
      */
    private static final boolean TRACK_ALLOCATIONS = false;

-    private static boolean DISCARD = false;
-
     private static ConcurrentMap<Object, Object> ACQUIRED_ARRAYS = new ConcurrentHashMap<>();

-    /**
-     * Discard the next check that all arrays should be released. This can be useful if for a specific test, the cost to make
-     * sure the array is released is higher than the cost the user would experience if the array would not be released.
-     */
-    public static void discardNextCheck() {
-        DISCARD = true;
-    }
-
     public static void ensureAllArraysAreReleased() throws Exception {
-        if (DISCARD) {
-            DISCARD = false;
-        } else {
         final Map<Object, Object> masterCopy = Maps.newHashMap(ACQUIRED_ARRAYS);
-        if (masterCopy.isEmpty()) {
-            return;
-        }
+        if (!masterCopy.isEmpty()) {
             // not empty, we might be executing on a shared cluster that keeps on obtaining
             // and releasing arrays, lets make sure that after a reasonable timeout, all master
             // copy (snapshot) have been released

@@ -74,9 +59,7 @@
                     return Sets.intersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet()).isEmpty();
                 }
             });
-            if (success) {
-                return;
-            }
+            if (!success) {
                 masterCopy.keySet().retainAll(ACQUIRED_ARRAYS.keySet());
                 ACQUIRED_ARRAYS.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
                 if (!masterCopy.isEmpty()) {

@@ -85,6 +68,7 @@
             }
         }
     }
+    }

     private final Random random;

MockPageCacheRecycler.java

@@ -43,9 +43,7 @@ public class MockPageCacheRecycler extends PageCacheRecycler {
     public static void ensureAllPagesAreReleased() throws Exception {
         final Map<Object, Throwable> masterCopy = Maps.newHashMap(ACQUIRED_PAGES);
-        if (masterCopy.isEmpty()) {
-            return;
-        }
+        if (!masterCopy.isEmpty()) {
             // not empty, we might be executing on a shared cluster that keeps on obtaining
             // and releasing pages, lets make sure that after a reasonable timeout, all master
             // copy (snapshot) have been released

@@ -55,9 +53,7 @@
                     return Sets.intersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()).isEmpty();
                 }
             });
-            if (success) {
-                return;
-            }
+            if (!success) {
                 masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet());
                 ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
                 if (!masterCopy.isEmpty()) {

@@ -65,6 +61,8 @@
                 throw new RuntimeException(masterCopy.size() + " pages have not been released", t);
             }
         }
+        }
+    }

     private final Random random;