Reduce performance impact of ExitableDirectoryReader () ()

Benchmarking showed that the performance impact of the
ExitableDirectoryReader is reduced considerably when the cancellation
check runs only every 8191 docs. Moreover, set the cancellable task
before calling QueryPhase#preProcess() and make sure we don't wrap
the reader with an ExitableDirectoryReader at all when
lowLevelCancellation is set to false, so that it causes no
performance impact at all in that case.
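
For illustration, a minimal, self-contained sketch of that last point; the helper and its names are hypothetical (in the actual change the wrapping happens inside ContextIndexSearcher with the internal ExitableDirectoryReader, as shown in the diff below):

```java
import java.util.function.Function;

final class ReaderWrapping {
    // Generic stand-in: in Elasticsearch the reader is a Lucene DirectoryReader and the
    // wrapper constructs an ExitableDirectoryReader around it.
    static <R> R maybeWrap(R reader, boolean lowLevelCancellation, Function<R, R> exitableWrapper) {
        // With low-level cancellation disabled, return the reader untouched so the
        // cancellation machinery adds no per-document overhead at all.
        return lowLevelCancellation ? exitableWrapper.apply(reader) : reader;
    }
}
```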

Follows: 
Follows: 
Follows: 

(cherry picked from commit cdc377e8e74d3ca6c231c36dc5e80621aab47c69)
Marios Trivyzas 2020-03-23 21:30:34 +01:00 committed by GitHub
parent 286c3660bd
commit 3a3e964956
13 changed files with 64 additions and 66 deletions

@@ -101,6 +101,7 @@ final class DefaultSearchContext extends SearchContext {
private final QuerySearchResult queryResult;
private final FetchSearchResult fetchResult;
private final float queryBoost;
private final boolean lowLevelCancellation;
private TimeValue timeout;
// terminate after count
private int terminateAfter = DEFAULT_TERMINATE_AFTER;
@@ -121,7 +122,6 @@ final class DefaultSearchContext extends SearchContext {
private int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO;
private FieldDoc searchAfter;
private CollapseContext collapse;
private boolean lowLevelCancellation;
// filter for sliced scroll
private SliceBuilder sliceBuilder;
private SearchShardTask task;
@@ -160,7 +160,7 @@ final class DefaultSearchContext extends SearchContext {
DefaultSearchContext(SearchContextId id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, ClusterService clusterService, IndexService indexService,
IndexShard indexShard, BigArrays bigArrays, LongSupplier relativeTimeSupplier, TimeValue timeout,
FetchPhase fetchPhase, Version minNodeVersion) throws IOException {
FetchPhase fetchPhase, boolean lowLevelCancellation, Version minNodeVersion) throws IOException {
this.id = id;
this.request = request;
this.fetchPhase = fetchPhase;
@@ -176,7 +176,7 @@ final class DefaultSearchContext extends SearchContext {
this.indexService = indexService;
this.clusterService = clusterService;
this.searcher = new ContextIndexSearcher(engineSearcher.getIndexReader(), engineSearcher.getSimilarity(),
engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy());
engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy(), lowLevelCancellation);
this.relativeTimeSupplier = relativeTimeSupplier;
this.timeout = timeout;
this.minNodeVersion = minNodeVersion;
@@ -184,6 +184,7 @@ final class DefaultSearchContext extends SearchContext {
request::nowInMillis, shardTarget.getClusterAlias());
queryShardContext.setTypes(request.types());
queryBoost = request.indexBoost();
this.lowLevelCancellation = lowLevelCancellation;
}
@Override
@@ -585,10 +586,6 @@ final class DefaultSearchContext extends SearchContext {
return lowLevelCancellation;
}
public void lowLevelCancellation(boolean lowLevelCancellation) {
this.lowLevelCancellation = lowLevelCancellation;
}
@Override
public FieldDoc searchAfter() {
return searchAfter;

@@ -339,10 +339,9 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
private DfsSearchResult executeDfsPhase(SearchRewriteContext rewriteContext, SearchShardTask task) throws IOException {
final SearchContext context = createAndPutContext(rewriteContext);
final SearchContext context = createAndPutContext(rewriteContext, task);
context.incRef();
try {
context.setTask(task);
contextProcessing(context);
dfsPhase.execute(context);
contextProcessedSuccessfully(context);
@@ -422,11 +421,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
private SearchPhaseResult executeQueryPhase(SearchRewriteContext rewriteContext, SearchShardTask task) throws Exception {
final SearchContext context = createAndPutContext(rewriteContext);
final SearchContext context = createAndPutContext(rewriteContext, task);
final ShardSearchRequest request = rewriteContext.request;
context.incRef();
try {
context.setTask(task);
final long afterQueryTime;
try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) {
contextProcessing(context);
@@ -626,8 +624,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
}
final SearchContext createAndPutContext(SearchRewriteContext rewriteContext) throws IOException {
SearchContext context = createContext(rewriteContext);
final SearchContext createAndPutContext(SearchRewriteContext rewriteContext, SearchShardTask task) throws IOException {
SearchContext context = createContext(rewriteContext, task);
onNewContext(context);
boolean success = false;
try {
@@ -660,7 +658,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
}
final SearchContext createContext(SearchRewriteContext rewriteContext) throws IOException {
final SearchContext createContext(SearchRewriteContext rewriteContext, SearchShardTask searchTask) throws IOException {
final DefaultSearchContext context = createSearchContext(rewriteContext, defaultSearchTimeout);
try {
final ShardSearchRequest request = rewriteContext.request;
@@ -684,6 +682,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
if (context.size() == -1) {
context.size(DEFAULT_SIZE);
}
context.setTask(searchTask);
// pre process
dfsPhase.preProcess(context);
@@ -696,7 +695,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
keepAlive = request.scroll().keepAlive().millis();
}
contextScrollKeepAlive(context, keepAlive);
context.lowLevelCancellation(lowLevelCancellation);
} catch (Exception e) {
context.close();
throw e;
@@ -731,7 +729,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
final SearchContextId searchContextId = new SearchContextId(readerId, idGenerator.incrementAndGet());
DefaultSearchContext searchContext = new DefaultSearchContext(searchContextId, request, shardTarget,
searcher, clusterService, indexService, indexShard, bigArrays, threadPool::relativeTimeInMillis, timeout,
fetchPhase, clusterService.state().nodes().getMinNodeVersion());
fetchPhase, lowLevelCancellation, clusterService.state().nodes().getMinNodeVersion());
success = true;
return searchContext;
} finally {

@@ -82,23 +82,20 @@ public class ContextIndexSearcher extends IndexSearcher {
private MutableQueryTimeout cancellable;
public ContextIndexSearcher(IndexReader reader, Similarity similarity,
QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) throws IOException {
this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout());
QueryCache queryCache, QueryCachingPolicy queryCachingPolicy,
boolean wrapWithExitableDirectoryReader) throws IOException {
this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout(), wrapWithExitableDirectoryReader);
}
// TODO: Make the 2nd constructor private so that the IndexReader is always wrapped.
// Some issues must be fixed:
// - regarding tests deriving from AggregatorTestCase and more specifically the use of searchAndReduce and
// the ShardSearcher sub-searchers.
// - tests that use a MultiReader
public ContextIndexSearcher(IndexReader reader, Similarity similarity,
QueryCache queryCache, QueryCachingPolicy queryCachingPolicy,
MutableQueryTimeout cancellable) throws IOException {
super(cancellable != null ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader);
private ContextIndexSearcher(IndexReader reader, Similarity similarity,
QueryCache queryCache, QueryCachingPolicy queryCachingPolicy,
MutableQueryTimeout cancellable,
boolean wrapWithExitableDirectoryReader) throws IOException {
super(wrapWithExitableDirectoryReader ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader);
setSimilarity(similarity);
setQueryCache(queryCache);
setQueryCachingPolicy(queryCachingPolicy);
this.cancellable = cancellable != null ? cancellable : new MutableQueryTimeout();
this.cancellable = cancellable;
}
public void setProfiler(QueryProfiler profiler) {

@@ -245,7 +245,7 @@ class ExitableDirectoryReader extends FilterDirectoryReader {
private static class ExitableIntersectVisitor implements PointValues.IntersectVisitor {
private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = (1 << 10) - 1; // 1023
private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = (1 << 13) - 1; // 8191
private final PointValues.IntersectVisitor in;
private final QueryCancellation queryCancellation;
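
The new constant acts as a bit mask for a sampled check. Below is a self-contained sketch of that pattern; the class and method names are hypothetical, not the actual ExitableDirectoryReader internals:

```java
// Amortizes the cancellation check: only one call in every 8192 actually runs it.
final class SampledCancellationCheck {
    private static final int MAX_CALLS_BEFORE_CHECK = (1 << 13) - 1; // 8191, used as a mask
    private final Runnable checkCancelled;
    private int calls;

    SampledCancellationCheck(Runnable checkCancelled) {
        this.checkCancelled = checkCancelled;
    }

    void onVisit() {
        // (calls++ & mask) == 0 only once per 8192 invocations, so the check's cost
        // becomes negligible even on hot per-document / per-point code paths.
        if ((calls++ & MAX_CALLS_BEFORE_CHECK) == 0) {
            checkCancelled.run();
        }
    }
}
```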

@@ -112,9 +112,9 @@ public class QueryPhase implements SearchPhase {
public void preProcess(SearchContext context) {
final Runnable cancellation;
if (context.lowLevelCancellation()) {
SearchShardTask task = context.getTask();
cancellation = context.searcher().addQueryCancellation(() -> {
if (task.isCancelled()) {
SearchShardTask task = context.getTask();
if (task != null && task.isCancelled()) {
throw new TaskCancelledException("cancelled");
}
});
@@ -282,9 +282,9 @@ public class QueryPhase implements SearchPhase {
}
if (searchContext.lowLevelCancellation()) {
SearchShardTask task = searchContext.getTask();
searcher.addQueryCancellation(() -> {
if (task.isCancelled()) {
SearchShardTask task = searchContext.getTask();
if (task != null && task.isCancelled()) {
throw new TaskCancelledException("cancelled");
}
});
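
Both QueryPhase hunks follow the same pattern: the cancellation runnable looks the task up lazily and tolerates a null task. Here is a self-contained sketch of that idea with hypothetical names; in the real code the task is a SearchShardTask and the exception is TaskCancelledException:

```java
import java.util.function.Supplier;

final class LazyCancellation {
    interface Task {
        boolean isCancelled();
    }

    // Builds a cancellation check that re-reads the current task on every invocation and
    // tolerates a null task, so it is safe to register before a task has been set.
    static Runnable cancellationCheck(Supplier<Task> currentTask) {
        return () -> {
            Task task = currentTask.get();
            if (task != null && task.isCancelled()) {
                throw new IllegalStateException("cancelled"); // stand-in for TaskCancelledException
            }
        };
    }
}
```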

@@ -122,7 +122,8 @@ public class DefaultSearchContextTests extends ESTestCase {
SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE);
DefaultSearchContext context1 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 1L),
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
false, Version.CURRENT);
context1.from(300);
// resultWindow greater than maxResultWindow and scrollContext is null
@@ -163,7 +164,8 @@ public class DefaultSearchContextTests extends ESTestCase {
// rescore is null but sliceBuilder is not null
DefaultSearchContext context2 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 2L),
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
false, Version.CURRENT);
SliceBuilder sliceBuilder = mock(SliceBuilder.class);
int numSlices = maxSlicesPerScroll + randomIntBetween(1, 100);
@@ -180,7 +182,8 @@ public class DefaultSearchContextTests extends ESTestCase {
when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST);
DefaultSearchContext context3 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 3L),
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
false, Version.CURRENT);
ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery();
context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false);
assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query()));
@@ -190,7 +193,8 @@ public class DefaultSearchContextTests extends ESTestCase {
when(shardSearchRequest.indexRoutings()).thenReturn(new String[0]);
DefaultSearchContext context4 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 4L),
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
false, Version.CURRENT);
context4.sliceBuilder(new SliceBuilder(1,2)).parsedQuery(parsedQuery).preProcess(false);
Query query1 = context4.query();
context4.sliceBuilder(new SliceBuilder(0,2)).parsedQuery(parsedQuery).preProcess(false);

@@ -90,8 +90,8 @@ public class SearchCancellationTests extends ESTestCase {
}
public void testAddingCancellationActions() throws IOException {
ContextIndexSearcher searcher = new ContextIndexSearcher(reader,
IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
NullPointerException npe = expectThrows(NullPointerException.class, () -> searcher.addQueryCancellation(null));
assertEquals("cancellation runnable should not be null", npe.getMessage());
@@ -104,8 +104,8 @@ public class SearchCancellationTests extends ESTestCase {
public void testCancellableCollector() throws IOException {
TotalHitCountCollector collector1 = new TotalHitCountCollector();
Runnable cancellation = () -> { throw new TaskCancelledException("cancelled"); };
ContextIndexSearcher searcher = new ContextIndexSearcher(reader,
IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
searcher.search(new MatchAllDocsQuery(), collector1);
assertThat(collector1.getTotalHits(), equalTo(reader.numDocs()));
@@ -120,14 +120,14 @@ public class SearchCancellationTests extends ESTestCase {
assertThat(collector2.getTotalHits(), equalTo(reader.numDocs()));
}
public void testCancellableDirectoryReader() throws IOException {
public void testExitableDirectoryReader() throws IOException {
AtomicBoolean cancelled = new AtomicBoolean(true);
Runnable cancellation = () -> {
if (cancelled.get()) {
throw new TaskCancelledException("cancelled");
}};
ContextIndexSearcher searcher = new ContextIndexSearcher(reader,
IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
searcher.addQueryCancellation(cancellation);
CompiledAutomaton automaton = new CompiledAutomaton(new RegExp("a.*").toAutomaton());

@@ -368,7 +368,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
indexShard);
final SearchContext contextWithDefaultTimeout = service.createContext(rewriteContext);
final SearchContext contextWithDefaultTimeout = service.createContext(rewriteContext, null);
try {
// the search context should inherit the default timeout
assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
@@ -383,7 +383,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
indexShard);
final SearchContext context = service.createContext(rewriteContext);
final SearchContext context = service.createContext(rewriteContext, null);
try {
// the search context should inherit the query timeout
assertThat(context.timeout(), equalTo(TimeValue.timeValueSeconds(seconds)));
@@ -417,7 +417,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
{
SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
try (SearchContext context = service.createContext(rewriteContext)) {
try (SearchContext context = service.createContext(rewriteContext, null)) {
assertNotNull(context);
}
}
@@ -426,7 +426,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
searchSourceBuilder.docValueField("one_field_too_much");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> service.createContext(rewriteContext));
() -> service.createContext(rewriteContext, null));
assertEquals(
"Trying to retrieve too many docvalue_fields. Must be less than or equal to: [100] but was [101]. "
+ "This limit can be set by changing the [index.max_docvalue_fields_search] index level setting.", ex.getMessage());
@@ -458,7 +458,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
{
SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
try (SearchContext context = service.createContext(rewriteContext)) {
try (SearchContext context = service.createContext(rewriteContext, null)) {
assertNotNull(context);
}
}
@@ -468,7 +468,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap()));
SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> service.createContext(rewriteContext));
() -> service.createContext(rewriteContext, null));
assertEquals(
"Trying to retrieve too many script_fields. Must be less than or equal to: [" + maxScriptFields + "] but was ["
+ (maxScriptFields + 1)
@@ -494,7 +494,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
indexShard);
try (SearchContext context = service.createContext(rewriteContext)) {
try (SearchContext context = service.createContext(rewriteContext, null)) {
assertEquals(0, context.scriptFields().fields().size());
}
}
@@ -531,7 +531,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
SearchService.SearchRewriteContext rewriteContext =
service.acquireSearcherAndRewrite(new ShardScrollRequestTest(indexShard.shardId()), indexShard);
ElasticsearchException ex = expectThrows(ElasticsearchException.class,
() -> service.createAndPutContext(rewriteContext));
() -> service.createAndPutContext(rewriteContext, null));
assertEquals(
"Trying to create too many scroll contexts. Must be less than or equal to: [" +
SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY) + "]. " +
@@ -557,7 +557,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
SearchService.SearchRewriteContext rewriteContext =
searchService.acquireSearcherAndRewrite(new ShardScrollRequestTest(indexShard.shardId()), indexShard);
try {
searchService.createAndPutContext(rewriteContext);
searchService.createAndPutContext(rewriteContext, null);
} catch (ElasticsearchException e) {
assertThat(e.getMessage(), equalTo(
"Trying to create too many scroll contexts. Must be less than or equal to: " +
@@ -835,7 +835,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
}
}, indexShard);
NullPointerException e = expectThrows(NullPointerException.class,
() -> service.createContext(rewriteContext));
() -> service.createContext(rewriteContext, null));
assertEquals("expected", e.getMessage());
assertEquals("should have 2 store refs (IndexService + InternalEngine)", 2, indexService.getShard(0).store().refCount());
}
@@ -988,7 +988,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
int numContexts = randomIntBetween(1, 10);
for (int i = 0; i < numContexts; i++) {
SearchService.SearchRewriteContext rewriteContext = searchService.acquireSearcherAndRewrite(shardSearchRequest, indexShard);
final SearchContext searchContext = searchService.createContext(rewriteContext);
final SearchContext searchContext = searchService.createContext(rewriteContext, null);
assertThat(searchContext.id().getId(), equalTo((long) (i + 1)));
searchService.putContext(searchContext);
contextIds.add(searchContext.id());

@@ -240,7 +240,7 @@ public class ContextIndexSearcherTests extends ESTestCase {
DocumentSubsetDirectoryReader filteredReader = new DocumentSubsetDirectoryReader(reader, cache, roleQuery);
ContextIndexSearcher searcher = new ContextIndexSearcher(filteredReader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
// Assert wrapping
assertEquals(ExitableDirectoryReader.class, searcher.getIndexReader().getClass());

@@ -43,8 +43,8 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.elasticsearch.core.internal.io.IOUtils;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.test.ESTestCase;
@@ -83,7 +83,7 @@ public class QueryProfilerTests extends ESTestCase {
reader = w.getReader();
w.close();
searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), ALWAYS_CACHE_POLICY);
IndexSearcher.getDefaultQueryCache(), ALWAYS_CACHE_POLICY, true);
}
@After

@@ -925,12 +925,12 @@ public class QueryPhaseTests extends IndexShardTestCase {
private static ContextIndexSearcher newContextSearcher(IndexReader reader) throws IOException {
return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
}
private static ContextIndexSearcher newEarlyTerminationContextSearcher(IndexReader reader, int size) throws IOException {
return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()) {
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true) {
@Override
public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
@@ -943,7 +943,7 @@ public class QueryPhaseTests extends IndexShardTestCase {
// used to check that numeric long or date sort optimization was run
private static ContextIndexSearcher newOptimizedContextSearcher(IndexReader reader, int queryType) throws IOException {
return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()) {
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true) {
@Override
public void search(List<LeafReaderContext> leaves, Weight weight, CollectorManager manager,

@@ -204,7 +204,7 @@ public abstract class AggregatorTestCase extends ESTestCase {
}
};
ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(indexSearcher.getIndexReader(),
indexSearcher.getSimilarity(), queryCache, queryCachingPolicy, null);
indexSearcher.getSimilarity(), queryCache, queryCachingPolicy, false);
SearchContext searchContext = mock(SearchContext.class);
when(searchContext.numberOfShards()).thenReturn(1);

@@ -161,8 +161,9 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
IndexSearcher indexSearcher = new ContextIndexSearcher(
wrappedDirectoryReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(), true);
int expectedHitCount = valuesHitCount[i];
logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount);
@@ -270,8 +271,9 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId);
DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
IndexSearcher indexSearcher = new ContextIndexSearcher(
wrappedDirectoryReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(), true);
ScoreDoc[] hits = indexSearcher.search(new MatchAllDocsQuery(), 1000).scoreDocs;
Set<Integer> actualDocIds = new HashSet<>();