Refactor IndexSearcherWrapper to disallow the wrapping of IndexSearcher (#43645)
This change removes the ability to wrap an IndexSearcher in plugins. The IndexSearcherWrapper is replaced by an IndexReaderWrapper that allows wrapping of the DirectoryReader only. This simplifies the creation of the context IndexSearcher that is used on a per-request basis. This change also moves the optimization that was implemented in the security index searcher wrapper to the ContextIndexSearcher, which now checks the live docs to determine how the search should be executed. If the underlying live docs are a sparse bit set, the searcher computes the intersection between the query and the live docs instead of checking the live docs on every document that matches the query.
parent 377c4cfdc0
commit 7ca69db83f
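For illustration (not part of this commit): a plugin that previously supplied an IndexSearcherWrapper now registers a reader wrapper through the new IndexModule#setReaderWrapper hook, typically from Plugin#onIndexModule. A minimal sketch, assuming the plugin base class hook; the wrapper shown is a no-op:

import org.elasticsearch.index.IndexModule;
import org.elasticsearch.plugins.Plugin;

public class MyReaderWrapperPlugin extends Plugin {
    @Override
    public void onIndexModule(IndexModule indexModule) {
        // The outer factory runs once per index; the inner CheckedFunction runs per request.
        indexModule.setReaderWrapper(indexService -> reader -> {
            // No-op shown for brevity. A real wrapper must return a FilterDirectoryReader that
            // delegates getReaderCacheHelper()/getCoreCacheHelper() and exposes the original
            // reader via FilterDirectoryReader#getDelegate().
            return reader;
        });
    }
}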
XIndexSearcher.java (org.apache.lucene.search, deleted):

@@ -1,46 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.lucene.search;
-
-import org.apache.lucene.index.LeafReaderContext;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * A wrapper for {@link IndexSearcher} that makes {@link IndexSearcher#search(List, Weight, Collector)}
- * visible by sub-classes.
- */
-public class XIndexSearcher extends IndexSearcher {
-    private final IndexSearcher in;
-
-    public XIndexSearcher(IndexSearcher in) {
-        super(in.getIndexReader());
-        this.in = in;
-        setSimilarity(in.getSimilarity());
-        setQueryCache(in.getQueryCache());
-        setQueryCachingPolicy(in.getQueryCachingPolicy());
-    }
-
-    @Override
-    public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-        in.search(leaves, weight, collector);
-    }
-}
CombinedBitSet.java (org.apache.lucene.util, new):

@@ -0,0 +1,117 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lucene.util;
+
+import org.apache.lucene.search.DocIdSetIterator;
+
+/**
+ * A {@link BitSet} implementation that combines two instances of {@link BitSet} and {@link Bits}
+ * to provide a single merged view.
+ */
+public final class CombinedBitSet extends BitSet implements Bits {
+    private final BitSet first;
+    private final Bits second;
+    private final int length;
+
+    public CombinedBitSet(BitSet first, Bits second) {
+        this.first = first;
+        this.second = second;
+        this.length = first.length();
+    }
+
+    public BitSet getFirst() {
+        return first;
+    }
+
+    /**
+     * This implementation is slow and requires to iterate over all bits to compute
+     * the intersection. Use {@link #approximateCardinality()} for
+     * a fast approximation.
+     */
+    @Override
+    public int cardinality() {
+        int card = 0;
+        for (int i = 0; i < length; i++) {
+            card += get(i) ? 1 : 0;
+        }
+        return card;
+    }
+
+    @Override
+    public int approximateCardinality() {
+        return first.cardinality();
+    }
+
+    @Override
+    public int prevSetBit(int index) {
+        assert index >= 0 && index < length : "index=" + index + ", numBits=" + length();
+        int prev = first.prevSetBit(index);
+        while (prev != -1 && second.get(prev) == false) {
+            if (prev == 0) {
+                return -1;
+            }
+            prev = first.prevSetBit(prev - 1);
+        }
+        return prev;
+    }
+
+    @Override
+    public int nextSetBit(int index) {
+        assert index >= 0 && index < length : "index=" + index + " numBits=" + length();
+        int next = first.nextSetBit(index);
+        while (next != DocIdSetIterator.NO_MORE_DOCS && second.get(next) == false) {
+            if (next == length() - 1) {
+                return DocIdSetIterator.NO_MORE_DOCS;
+            }
+            next = first.nextSetBit(next + 1);
+        }
+        return next;
+    }
+
+    @Override
+    public long ramBytesUsed() {
+        return first.ramBytesUsed();
+    }
+
+    @Override
+    public boolean get(int index) {
+        return first.get(index) && second.get(index);
+    }
+
+    @Override
+    public int length() {
+        return length;
+    }
+
+    @Override
+    public void set(int i) {
+        throw new UnsupportedOperationException("not implemented");
+    }
+
+    @Override
+    public void clear(int i) {
+        throw new UnsupportedOperationException("not implemented");
+    }
+
+    @Override
+    public void clear(int startIndex, int endIndex) {
+        throw new UnsupportedOperationException("not implemented");
+    }
+}
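As a standalone illustration of the class above (not from the commit; names and values are invented): the first, sparse bit set drives iteration while the second acts as a filter, so a sparse role-query bit set combined with dense live docs stays cheap to iterate.

import org.apache.lucene.util.CombinedBitSet;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.SparseFixedBitSet;

public class CombinedBitSetDemo {
    public static void main(String[] args) {
        int maxDoc = 1_000;
        SparseFixedBitSet role = new SparseFixedBitSet(maxDoc); // docs visible to the role query
        role.set(3);
        role.set(500);
        FixedBitSet liveDocs = new FixedBitSet(maxDoc);
        liveDocs.set(0, maxDoc); // all docs live...
        liveDocs.clear(500);     // ...except doc 500, which was deleted

        CombinedBitSet combined = new CombinedBitSet(role, liveDocs);
        System.out.println(combined.get(3));        // true: set in both
        System.out.println(combined.get(500));      // false: deleted
        System.out.println(combined.nextSetBit(0)); // 3: iteration is driven by the sparse set
    }
}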
IndexModule.java (org.elasticsearch.index):

@@ -19,6 +19,10 @@
 
 package org.elasticsearch.index;
 
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FilterDirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.MMapDirectory;
@@ -26,6 +30,7 @@ import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.Version;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.TriFunction;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Setting;
@@ -38,10 +43,10 @@ import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.cache.query.DisabledQueryCache;
 import org.elasticsearch.index.cache.query.IndexQueryCache;
 import org.elasticsearch.index.cache.query.QueryCache;
+import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.EngineFactory;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.shard.IndexEventListener;
-import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.IndexingOperationListener;
 import org.elasticsearch.index.shard.SearchOperationListener;
 import org.elasticsearch.index.similarity.SimilarityService;
@@ -112,7 +117,8 @@ public final class IndexModule {
     private final IndexSettings indexSettings;
     private final AnalysisRegistry analysisRegistry;
     private final EngineFactory engineFactory;
-    private SetOnce<IndexSearcherWrapperFactory> indexSearcherWrapper = new SetOnce<>();
+    private SetOnce<Function<IndexService,
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException>>> indexReaderWrapper = new SetOnce<>();
     private final Set<IndexEventListener> indexEventListeners = new HashSet<>();
     private final Map<String, TriFunction<Settings, Version, ScriptService, Similarity>> similarities = new HashMap<>();
     private final Map<String, IndexStorePlugin.DirectoryFactory> directoryFactories;
@@ -277,13 +283,26 @@
     }
 
     /**
-     * Sets a {@link org.elasticsearch.index.IndexModule.IndexSearcherWrapperFactory} that is called once the IndexService
-     * is fully constructed.
-     * Note: this method can only be called once per index. Multiple wrappers are not supported.
+     * Sets the factory for creating new {@link DirectoryReader} wrapper instances.
+     * The factory ({@link Function}) is called once the IndexService is fully constructed.
+     * NOTE: this method can only be called once per index. Multiple wrappers are not supported.
+     * <p>
+     * The {@link CheckedFunction} is invoked each time a {@link Engine.Searcher} is requested to do an operation,
+     * for example search, and must return a new directory reader wrapping the provided directory reader or if no
+     * wrapping was performed the provided directory reader.
+     * The wrapped reader can filter out document just like delete documents etc. but must not change any term or
+     * document content.
+     * NOTE: The index reader wrapper ({@link CheckedFunction}) has a per-request lifecycle,
+     * must delegate {@link IndexReader#getReaderCacheHelper()}, {@link LeafReader#getCoreCacheHelper()}
+     * and must be an instance of {@link FilterDirectoryReader} that eventually exposes the original reader
+     * via {@link FilterDirectoryReader#getDelegate()}.
+     * The returned reader is closed once it goes out of scope.
+     * </p>
      */
-    public void setSearcherWrapper(IndexSearcherWrapperFactory indexSearcherWrapperFactory) {
+    public void setReaderWrapper(Function<IndexService,
+                                     CheckedFunction<DirectoryReader, DirectoryReader, IOException>> indexReaderWrapperFactory) {
         ensureNotFrozen();
-        this.indexSearcherWrapper.set(indexSearcherWrapperFactory);
+        this.indexReaderWrapper.set(indexReaderWrapperFactory);
     }
 
     IndexEventListener freeze() { // pkg private for testing
@@ -348,16 +367,6 @@
 
     }
 
-    /**
-     * Factory for creating new {@link IndexSearcherWrapper} instances
-     */
-    public interface IndexSearcherWrapperFactory {
-        /**
-         * Returns a new IndexSearcherWrapper. This method is called once per index per node
-         */
-        IndexSearcherWrapper newWrapper(IndexService indexService);
-    }
-
     public static Type defaultStoreType(final boolean allowMmap) {
         if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) {
             return Type.HYBRIDFS;
@@ -384,8 +393,8 @@
                                          NamedWriteableRegistry namedWriteableRegistry)
         throws IOException {
         final IndexEventListener eventListener = freeze();
-        IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null
-            ? (shard) -> null : indexSearcherWrapper.get();
+        Function<IndexService, CheckedFunction<DirectoryReader, DirectoryReader, IOException>> readerWrapperFactory =
+            indexReaderWrapper.get() == null ? (shard) -> null : indexReaderWrapper.get();
         eventListener.beforeIndexCreated(indexSettings.getIndex(), indexSettings.getSettings());
         final IndexStorePlugin.DirectoryFactory directoryFactory = getDirectoryFactory(indexSettings, directoryFactories);
         final QueryCache queryCache;
@@ -402,7 +411,7 @@
         return new IndexService(indexSettings, indexCreationContext, environment, xContentRegistry,
             new SimilarityService(indexSettings, scriptService, similarities),
             shardStoreDeleter, analysisRegistry, engineFactory, circuitBreakerService, bigArrays, threadPool, scriptService,
-            client, queryCache, directoryFactory, eventListener, searcherWrapperFactory, mapperRegistry,
+            client, queryCache, directoryFactory, eventListener, readerWrapperFactory, mapperRegistry,
             indicesFieldDataCache, searchOperationListeners, indexOperationListeners, namedWriteableRegistry);
     }
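The contract spelled out in the setReaderWrapper javadoc above can be satisfied by a wrapper along these lines (an illustrative sketch, not code from this commit; the class name is invented). It mirrors the NonClosingReaderWrapper pattern used later in this diff:

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.LeafReader;

// Hypothetical example of a contract-conforming wrapper: it may hide documents,
// but it must delegate the reader cache helper and expose the original reader.
public class PassThroughReader extends FilterDirectoryReader {
    public PassThroughReader(DirectoryReader in) throws IOException {
        super(in, new SubReaderWrapper() {
            @Override
            public LeafReader wrap(LeafReader reader) {
                return reader; // a filtering wrapper would return a FilterLeafReader here
            }
        });
    }

    @Override
    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
        return new PassThroughReader(in);
    }

    @Override
    public CacheHelper getReaderCacheHelper() {
        return in.getReaderCacheHelper(); // required: delegate to the original reader
    }
}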
IndexService.java (org.elasticsearch.index):

@@ -20,6 +20,7 @@
 package org.elasticsearch.index;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.search.IndexSearcher;
@@ -32,6 +33,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Setting;
@@ -59,7 +61,6 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.seqno.RetentionLeaseSyncer;
 import org.elasticsearch.index.shard.IndexEventListener;
-import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.IndexShardClosedException;
 import org.elasticsearch.index.shard.IndexingOperationListener;
@@ -91,6 +92,7 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.Consumer;
+import java.util.function.Function;
 import java.util.function.LongSupplier;
 import java.util.function.Supplier;
@@ -106,7 +108,7 @@
     private final NodeEnvironment nodeEnv;
     private final ShardStoreDeleter shardStoreDeleter;
     private final IndexStorePlugin.DirectoryFactory directoryFactory;
-    private final IndexSearcherWrapper searcherWrapper;
+    private final CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper;
     private final IndexCache indexCache;
     private final MapperService mapperService;
     private final NamedXContentRegistry xContentRegistry;
@@ -153,7 +155,7 @@
                         QueryCache queryCache,
                         IndexStorePlugin.DirectoryFactory directoryFactory,
                         IndexEventListener eventListener,
-                        IndexModule.IndexSearcherWrapperFactory wrapperFactory,
+                        Function<IndexService, CheckedFunction<DirectoryReader, DirectoryReader, IOException>> wrapperFactory,
                         MapperRegistry mapperRegistry,
                         IndicesFieldDataCache indicesFieldDataCache,
                         List<SearchOperationListener> searchOperationListeners,
@@ -205,7 +207,7 @@
         this.directoryFactory = directoryFactory;
         this.engineFactory = Objects.requireNonNull(engineFactory);
         // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
-        this.searcherWrapper = wrapperFactory.newWrapper(this);
+        this.readerWrapper = wrapperFactory.apply(this);
         this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners);
         this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners);
         // kick off async ops for the first shard in this index
@@ -418,7 +420,7 @@
                 similarityService,
                 engineFactory,
                 eventListener,
-                searcherWrapper,
+                readerWrapper,
                 threadPool,
                 bigArrays,
                 engineWarmer,
@@ -756,8 +758,8 @@
         return engineFactory;
     }
 
-    final IndexSearcherWrapper getSearcherWrapper() {
-        return searcherWrapper;
+    final CheckedFunction<DirectoryReader, DirectoryReader, IOException> getReaderWrapper() {
+        return readerWrapper;
     } // pkg private for testing
 
     final IndexStorePlugin.DirectoryFactory getDirectoryFactory() {
IndexSearcherWrapper.java (org.elasticsearch.index.shard, deleted):

@@ -1,138 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.shard;
-
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.FilterDirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.search.IndexSearcher;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
-import org.elasticsearch.core.internal.io.IOUtils;
-import org.elasticsearch.index.engine.Engine;
-
-import java.io.IOException;
-
-/**
- * Extension point to add custom functionality at request time to the {@link DirectoryReader}
- * and {@link IndexSearcher} managed by the {@link IndexShard}.
- */
-public class IndexSearcherWrapper {
-
-    /**
-     * Wraps the given {@link DirectoryReader}. The wrapped reader can filter out document just like delete documents etc. but
-     * must not change any term or document content.
-     * <p>
-     * NOTE: The wrapper has a per-request lifecycle, must delegate {@link IndexReader#getReaderCacheHelper()},
-     * {@link LeafReader#getCoreCacheHelper()} and must be an instance of {@link FilterDirectoryReader} that
-     * eventually exposes the original reader via {@link FilterDirectoryReader#getDelegate()}.
-     * The returned reader is closed once it goes out of scope.
-     * </p>
-     * @param reader The provided directory reader to be wrapped to add custom functionality
-     * @return a new directory reader wrapping the provided directory reader or if no wrapping was performed
-     *         the provided directory reader
-     */
-    protected DirectoryReader wrap(DirectoryReader reader) throws IOException {
-        return reader;
-    }
-
-    /**
-     * @param searcher The provided index searcher to be wrapped to add custom functionality
-     * @return a new index searcher wrapping the provided index searcher or if no wrapping was performed
-     *         the provided index searcher
-     */
-    protected IndexSearcher wrap(IndexSearcher searcher) throws IOException {
-        return searcher;
-    }
-
-    /**
-     * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher
-     * gets wrapped and a new {@link Engine.Searcher} instances is returned, otherwise the provided {@link Engine.Searcher} is returned.
-     *
-     * This is invoked each time a {@link Engine.Searcher} is requested to do an operation. (for example search)
-     */
-    public final Engine.Searcher wrap(Engine.Searcher engineSearcher) throws IOException {
-        final ElasticsearchDirectoryReader elasticsearchDirectoryReader =
-            ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(engineSearcher.getDirectoryReader());
-        if (elasticsearchDirectoryReader == null) {
-            throw new IllegalStateException("Can't wrap non elasticsearch directory reader");
-        }
-        NonClosingReaderWrapper nonClosingReaderWrapper = new NonClosingReaderWrapper(engineSearcher.getDirectoryReader());
-        DirectoryReader reader = wrap(nonClosingReaderWrapper);
-        if (reader != nonClosingReaderWrapper) {
-            if (reader.getReaderCacheHelper() != elasticsearchDirectoryReader.getReaderCacheHelper()) {
-                throw new IllegalStateException("wrapped directory reader doesn't delegate IndexReader#getCoreCacheKey," +
-                    " wrappers must override this method and delegate to the original readers core cache key. Wrapped readers can't be " +
-                    "used as cache keys since their are used only per request which would lead to subtle bugs");
-            }
-            if (ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(reader) != elasticsearchDirectoryReader) {
-                // prevent that somebody wraps with a non-filter reader
-                throw new IllegalStateException("wrapped directory reader hides actual ElasticsearchDirectoryReader but shouldn't");
-            }
-        }
-
-        final IndexSearcher origIndexSearcher = engineSearcher.searcher();
-        final IndexSearcher innerIndexSearcher = new IndexSearcher(reader);
-        innerIndexSearcher.setQueryCache(origIndexSearcher.getQueryCache());
-        innerIndexSearcher.setQueryCachingPolicy(origIndexSearcher.getQueryCachingPolicy());
-        innerIndexSearcher.setSimilarity(origIndexSearcher.getSimilarity());
-        // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point
-        // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten
-        // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times
-        final IndexSearcher indexSearcher = wrap(innerIndexSearcher);
-        if (reader == nonClosingReaderWrapper && indexSearcher == innerIndexSearcher) {
-            return engineSearcher;
-        } else {
-            // we close the reader to make sure wrappers can release resources if needed....
-            // our NonClosingReaderWrapper makes sure that our reader is not closed
-            return new Engine.Searcher(engineSearcher.source(), indexSearcher, () ->
-                IOUtils.close(indexSearcher.getIndexReader(), // this will close the wrappers excluding the NonClosingReaderWrapper
-                    engineSearcher)); // this will run the closeable on the wrapped engine searcher
-        }
-    }
-
-    private static final class NonClosingReaderWrapper extends FilterDirectoryReader {
-
-        private NonClosingReaderWrapper(DirectoryReader in) throws IOException {
-            super(in, new SubReaderWrapper() {
-                @Override
-                public LeafReader wrap(LeafReader reader) {
-                    return reader;
-                }
-            });
-        }
-
-        @Override
-        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-            return new NonClosingReaderWrapper(in);
-        }
-
-        @Override
-        protected void doClose() throws IOException {
-            // don't close here - mimic the MultiReader#doClose = false behavior that FilterDirectoryReader doesn't have
-        }
-
-        @Override
-        public CacheHelper getReaderCacheHelper() {
-            return in.getReaderCacheHelper();
-        }
-
-    }
-
-}
IndexShard.java (org.elasticsearch.index.shard):

@@ -23,9 +23,13 @@ import com.carrotsearch.hppc.ObjectLongMap;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.index.CheckIndex;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.ReferenceManager;
@@ -50,6 +54,7 @@ import org.elasticsearch.cluster.routing.RecoverySource;
 import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Booleans;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.CheckedRunnable;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.Tuple;
@@ -243,7 +248,7 @@
     private static final EnumSet<IndexShardState> writeAllowedStates = EnumSet.of(IndexShardState.RECOVERING,
         IndexShardState.POST_RECOVERY, IndexShardState.STARTED);
 
-    private final IndexSearcherWrapper searcherWrapper;
+    private final CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper;
 
     /**
      * True if this shard is still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link
@@ -269,7 +274,7 @@
             final SimilarityService similarityService,
             final @Nullable EngineFactory engineFactory,
             final IndexEventListener indexEventListener,
-            final IndexSearcherWrapper indexSearcherWrapper,
+            final CheckedFunction<DirectoryReader, DirectoryReader, IOException> indexReaderWrapper,
             final ThreadPool threadPool,
             final BigArrays bigArrays,
             final Engine.Warmer warmer,
@@ -349,7 +354,7 @@
             cachingPolicy = new UsageTrackingQueryCachingPolicy();
         }
         indexShardOperationPermits = new IndexShardOperationPermits(shardId, threadPool);
-        searcherWrapper = indexSearcherWrapper;
+        readerWrapper = indexReaderWrapper;
         refreshListeners = buildRefreshListeners();
         lastSearcherAccess.set(threadPool.relativeTimeInMillis());
         persistMetadata(path, indexSettings, shardRouting, null, logger);
@@ -1230,7 +1235,7 @@
             != null : "DirectoryReader must be an instance or ElasticsearchDirectoryReader";
         boolean success = false;
         try {
-            final Engine.Searcher wrappedSearcher = searcherWrapper == null ? searcher : searcherWrapper.wrap(searcher);
+            final Engine.Searcher wrappedSearcher = readerWrapper == null ? searcher : wrapSearcher(searcher, readerWrapper);
             assert wrappedSearcher != null;
             success = true;
             return wrappedSearcher;
@@ -1243,6 +1248,72 @@
         }
     }
 
+    static Engine.Searcher wrapSearcher(Engine.Searcher engineSearcher,
+                                        CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper) throws IOException {
+        assert readerWrapper != null;
+        final ElasticsearchDirectoryReader elasticsearchDirectoryReader =
+            ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(engineSearcher.getDirectoryReader());
+        if (elasticsearchDirectoryReader == null) {
+            throw new IllegalStateException("Can't wrap non elasticsearch directory reader");
+        }
+        NonClosingReaderWrapper nonClosingReaderWrapper = new NonClosingReaderWrapper(engineSearcher.getDirectoryReader());
+        DirectoryReader reader = readerWrapper.apply(nonClosingReaderWrapper);
+        if (reader != nonClosingReaderWrapper) {
+            if (reader.getReaderCacheHelper() != elasticsearchDirectoryReader.getReaderCacheHelper()) {
+                throw new IllegalStateException("wrapped directory reader doesn't delegate IndexReader#getCoreCacheKey," +
+                    " wrappers must override this method and delegate to the original readers core cache key. Wrapped readers can't be " +
+                    "used as cache keys since their are used only per request which would lead to subtle bugs");
+            }
+            if (ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(reader) != elasticsearchDirectoryReader) {
+                // prevent that somebody wraps with a non-filter reader
+                throw new IllegalStateException("wrapped directory reader hides actual ElasticsearchDirectoryReader but shouldn't");
+            }
+        }
+
+        if (reader == nonClosingReaderWrapper) {
+            return engineSearcher;
+        } else {
+            final IndexSearcher origIndexSearcher = engineSearcher.searcher();
+            final IndexSearcher newIndexSearcher = new IndexSearcher(reader);
+            newIndexSearcher.setQueryCache(origIndexSearcher.getQueryCache());
+            newIndexSearcher.setQueryCachingPolicy(origIndexSearcher.getQueryCachingPolicy());
+            newIndexSearcher.setSimilarity(origIndexSearcher.getSimilarity());
+            // we close the reader to make sure wrappers can release resources if needed....
+            // our NonClosingReaderWrapper makes sure that our reader is not closed
+            return new Engine.Searcher(engineSearcher.source(), newIndexSearcher, () ->
+                IOUtils.close(newIndexSearcher.getIndexReader(), // this will close the wrappers excluding the NonClosingReaderWrapper
+                    engineSearcher)); // this will run the closeable on the wrapped engine searcher
+        }
+    }
+
+    private static final class NonClosingReaderWrapper extends FilterDirectoryReader {
+
+        private NonClosingReaderWrapper(DirectoryReader in) throws IOException {
+            super(in, new SubReaderWrapper() {
+                @Override
+                public LeafReader wrap(LeafReader reader) {
+                    return reader;
+                }
+            });
+        }
+
+        @Override
+        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
+            return new NonClosingReaderWrapper(in);
+        }
+
+        @Override
+        protected void doClose() throws IOException {
+            // don't close here - mimic the MultiReader#doClose = false behavior that FilterDirectoryReader doesn't have
+        }
+
+        @Override
+        public CacheHelper getReaderCacheHelper() {
+            return in.getReaderCacheHelper();
+        }
+
+    }
+
     public void close(String reason, boolean flushEngine) throws IOException {
         synchronized (mutex) {
             try {
DefaultSearchContext.java:

@@ -175,7 +175,8 @@ final class DefaultSearchContext extends SearchContext {
         this.indexShard = indexShard;
         this.indexService = indexService;
         this.clusterService = clusterService;
-        this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
+        this.searcher = new ContextIndexSearcher(engineSearcher.reader(), engineSearcher.searcher().getSimilarity(),
+            indexService.cache().query(), indexShard.getQueryCachingPolicy());
         this.relativeTimeSupplier = relativeTimeSupplier;
         this.timeout = timeout;
         this.minNodeVersion = minNodeVersion;
ContextIndexSearcher.java (org.elasticsearch.search.internal):

@@ -20,14 +20,19 @@
 package org.elasticsearch.search.internal;
 
 import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermStates;
 import org.apache.lucene.search.BulkScorer;
 import org.apache.lucene.search.CollectionStatistics;
+import org.apache.lucene.search.CollectionTerminatedException;
 import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.ConjunctionDISI;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryCache;
 import org.apache.lucene.search.QueryCachingPolicy;
@@ -35,9 +40,13 @@ import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.XIndexSearcher;
+import org.apache.lucene.search.similarities.Similarity;
+import org.apache.lucene.util.BitSet;
+import org.apache.lucene.util.BitSetIterator;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.CombinedBitSet;
+import org.apache.lucene.util.SparseFixedBitSet;
 import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.search.dfs.AggregatedDfs;
 import org.elasticsearch.search.profile.Timer;
 import org.elasticsearch.search.profile.query.ProfileWeight;
@@ -46,6 +55,7 @@ import org.elasticsearch.search.profile.query.QueryProfiler;
 import org.elasticsearch.search.profile.query.QueryTimingType;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
@@ -53,26 +63,19 @@
  * Context-aware extension of {@link IndexSearcher}.
  */
 public class ContextIndexSearcher extends IndexSearcher implements Releasable {
-    /** The wrapped {@link IndexSearcher}. The reason why we sometimes prefer delegating to this searcher instead of {@code super} is that
-     * this instance may have more assertions, for example if it comes from MockInternalEngine which wraps the IndexSearcher into an
-     * AssertingIndexSearcher. */
-    private final XIndexSearcher in;
+    /**
+     * The interval at which we check for search cancellation when we cannot use
+     * a {@link CancellableBulkScorer}. See {@link #intersectScorerAndBitSet}.
+     */
+    private static int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11;
 
     private AggregatedDfs aggregatedDfs;
 
-    private final Engine.Searcher engineSearcher;
-
-    // TODO revisit moving the profiler to inheritance or wrapping model in the future
     private QueryProfiler profiler;
 
     private Runnable checkCancelled;
 
-    public ContextIndexSearcher(Engine.Searcher searcher, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) {
-        super(searcher.reader());
-        engineSearcher = searcher;
-        in = new XIndexSearcher(searcher.searcher());
-        setSimilarity(searcher.searcher().getSimilarity());
+    public ContextIndexSearcher(IndexReader reader, Similarity similarity, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) {
+        super(reader);
+        setSimilarity(similarity);
         setQueryCache(queryCache);
         setQueryCachingPolicy(queryCachingPolicy);
     }
@@ -104,7 +107,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
     }
 
     try {
-        return in.rewrite(original);
+        return super.rewrite(original);
     } finally {
         if (profiler != null) {
             profiler.stopAndAddRewriteTime();
@@ -130,7 +133,6 @@
             }
             return new ProfileWeight(query, weight, profile);
         } else {
-            // needs to be 'super', not 'in' in order to use aggregated DFS
             return super.createWeight(query, scoreMode, boost);
         }
     }
@@ -158,7 +160,6 @@
 
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
-            // in case the wrapped searcher (in) uses the scorer directly
             return weight.scorer(context);
         }
 
@@ -175,16 +176,75 @@
         } else {
             cancellableWeight = weight;
         }
-        in.search(leaves, cancellableWeight, collector);
+        searchInternal(leaves, cancellableWeight, collector);
     }
 
-    @Override
-    public Explanation explain(Query query, int doc) throws IOException {
-        if (aggregatedDfs != null) {
-            // dfs data is needed to explain the score
-            return super.explain(createWeight(rewrite(query), ScoreMode.COMPLETE, 1f), doc);
+    private void searchInternal(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
+        for (LeafReaderContext ctx : leaves) { // search each subreader
+            final LeafCollector leafCollector;
+            try {
+                leafCollector = collector.getLeafCollector(ctx);
+            } catch (CollectionTerminatedException e) {
+                // there is no doc of interest in this reader context
+                // continue with the following leaf
+                continue;
+            }
+            Bits liveDocs = ctx.reader().getLiveDocs();
+            BitSet liveDocsBitSet = getSparseBitSetOrNull(ctx.reader().getLiveDocs());
+            if (liveDocsBitSet == null) {
+                BulkScorer bulkScorer = weight.bulkScorer(ctx);
+                if (bulkScorer != null) {
+                    try {
+                        bulkScorer.score(leafCollector, liveDocs);
+                    } catch (CollectionTerminatedException e) {
+                        // collection was terminated prematurely
+                        // continue with the following leaf
+                    }
+                }
+            } else {
+                // if the role query result set is sparse then we should use the SparseFixedBitSet for advancing:
+                Scorer scorer = weight.scorer(ctx);
+                if (scorer != null) {
+                    try {
+                        intersectScorerAndBitSet(scorer, liveDocsBitSet, leafCollector,
+                            checkCancelled == null ? () -> {} : checkCancelled);
+                    } catch (CollectionTerminatedException e) {
+                        // collection was terminated prematurely
+                        // continue with the following leaf
+                    }
+                }
+            }
         }
-        return in.explain(query, doc);
+    }
+
+    private static BitSet getSparseBitSetOrNull(Bits liveDocs) {
+        if (liveDocs instanceof SparseFixedBitSet) {
+            return (BitSet) liveDocs;
+        } else if (liveDocs instanceof CombinedBitSet
+                // if the underlying role bitset is sparse
+                && ((CombinedBitSet) liveDocs).getFirst() instanceof SparseFixedBitSet) {
+            return (BitSet) liveDocs;
+        } else {
+            return null;
+        }
+    }
+
+    static void intersectScorerAndBitSet(Scorer scorer, BitSet acceptDocs,
+                                         LeafCollector collector, Runnable checkCancelled) throws IOException {
+        // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should
+        // be used first:
+        DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(new BitSetIterator(acceptDocs,
+            acceptDocs.approximateCardinality()), scorer.iterator()));
+        int seen = 0;
+        checkCancelled.run();
+        for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
+            if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) {
+                checkCancelled.run();
+            }
+            collector.collect(docId);
+        }
+        checkCancelled.run();
     }
 
     @Override
@@ -216,10 +276,8 @@
     }
 
     public DirectoryReader getDirectoryReader() {
-        return engineSearcher.getDirectoryReader();
-    }
-
-    public Engine.Searcher getEngineSearcher() {
-        return engineSearcher;
+        final IndexReader reader = getIndexReader();
+        assert reader instanceof DirectoryReader : "expected an instance of DirectoryReader, got " + reader.getClass();
+        return (DirectoryReader) reader;
     }
 }
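As a standalone sketch of the sparse path above (illustrative, not from the commit; assumes the Lucene 8.x ConjunctionDISI/BitSetIterator APIs used in this diff): intersectScorerAndBitSet relies on iterator cost to let the conjunction advance the sparser operand first, which is why the bit set's approximate cardinality is passed to BitSetIterator.

import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;

final class IntersectionSketch {
    // Prints the doc ids matched by both the scorer and the (sparse) accept set.
    static void intersect(Scorer scorer, BitSet acceptDocs) throws IOException {
        DocIdSetIterator it = ConjunctionDISI.intersectIterators(Arrays.asList(
            new BitSetIterator(acceptDocs, acceptDocs.approximateCardinality()), // cost = sparse cardinality
            scorer.iterator()));
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            System.out.println("doc " + doc); // a real caller would hand this to a LeafCollector
        }
    }
}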
@ -0,0 +1,111 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.lucene.util;
|
||||||
|
|
||||||
|
import org.apache.lucene.search.DocIdSetIterator;
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
public class CombinedBitSetTests extends ESTestCase {
|
||||||
|
public void testEmpty() {
|
||||||
|
for (float percent : new float[] {0f, 0.1f, 0.5f, 0.9f, 1f}) {
|
||||||
|
testCase(randomIntBetween(1, 10000), 0f, percent);
|
||||||
|
testCase(randomIntBetween(1, 10000), percent, 0f);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testSparse() {
|
||||||
|
for (float percent : new float[] {0f, 0.1f, 0.5f, 0.9f, 1f}) {
|
||||||
|
testCase(randomIntBetween(1, 10000), 0.1f, percent);
|
||||||
|
testCase(randomIntBetween(1, 10000), percent, 0.1f);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testDense() {
|
||||||
|
for (float percent : new float[] {0f, 0.1f, 0.5f, 0.9f, 1f}) {
|
||||||
|
testCase(randomIntBetween(1, 10000), 0.9f, percent);
|
||||||
|
testCase(randomIntBetween(1, 10000), percent, 0.9f);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testRandom() {
|
||||||
|
int iterations = atLeast(10);
|
||||||
|
for (int i = 0; i < iterations; i++) {
|
||||||
|
testCase(randomIntBetween(1, 10000), randomFloat(), randomFloat());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void testCase(int numBits, float percent1, float percent2) {
|
||||||
|
BitSet first = randomSet(numBits, percent1);
|
||||||
|
BitSet second = randomSet(numBits, percent2);
|
||||||
|
CombinedBitSet actual = new CombinedBitSet(first, second);
|
||||||
|
FixedBitSet expected = new FixedBitSet(numBits);
|
||||||
|
or(expected, first);
|
||||||
|
and(expected, second);
|
||||||
|
assertEquals(expected.cardinality(), actual.cardinality());
|
||||||
|
assertEquals(expected, actual, numBits);
|
||||||
|
for (int i = 0; i < numBits; ++i) {
|
||||||
|
assertEquals(expected.nextSetBit(i), actual.nextSetBit(i));
|
||||||
|
assertEquals(Integer.toString(i), expected.prevSetBit(i), actual.prevSetBit(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void or(BitSet set1, BitSet set2) {
|
||||||
|
int next = 0;
|
||||||
|
while (next < set2.length() && (next = set2.nextSetBit(next)) != DocIdSetIterator.NO_MORE_DOCS) {
|
||||||
|
set1.set(next);
|
||||||
|
next += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void and(BitSet set1, BitSet set2) {
|
||||||
|
int next = 0;
|
||||||
|
while (next < set1.length() && (next = set1.nextSetBit(next)) != DocIdSetIterator.NO_MORE_DOCS) {
|
||||||
|
if (set2.get(next) == false) {
|
||||||
|
set1.clear(next);
|
||||||
|
}
|
||||||
|
next += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void assertEquals(BitSet set1, BitSet set2, int maxDoc) {
|
||||||
|
for (int i = 0; i < maxDoc; ++i) {
|
||||||
|
assertEquals("Different at " + i, set1.get(i), set2.get(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private BitSet randomSet(int numBits, float percentSet) {
|
||||||
|
return randomSet(numBits, (int) (percentSet * numBits));
|
||||||
|
}
|
||||||
|
|
||||||
|
private BitSet randomSet(int numBits, int numBitsSet) {
|
||||||
|
assert numBitsSet <= numBits;
|
||||||
|
final BitSet set = randomBoolean() ? new SparseFixedBitSet(numBits) : new FixedBitSet(numBits);
|
||||||
|
for (int i = 0; i < numBitsSet; ++i) {
|
||||||
|
while (true) {
|
||||||
|
final int o = random().nextInt(numBits);
|
||||||
|
if (set.get(o) == false) {
|
||||||
|
set.set(o);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return set;
|
||||||
|
}
|
||||||
|
}
|
|
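The tests above pin down the contract of CombinedBitSet: it behaves as the intersection of a primary BitSet with a second Bits instance. A hedged sketch of the intended use, mirroring the DocumentSubsetReader added later in this commit (the helper name and scope are illustrative, not part of the change):

    // Sketch only: derive the effective live docs for a leaf by intersecting
    // role-query bits with segment deletions, as DocumentSubsetReader does below.
    static Bits effectiveLiveDocs(BitSet roleQueryBits, LeafReader leafReader) {
        final Bits actualLiveDocs = leafReader.getLiveDocs();
        if (roleQueryBits == null) {
            return new Bits.MatchNoBits(leafReader.maxDoc()); // no role bits: hide everything
        } else if (actualLiveDocs == null) {
            return roleQueryBits; // no deletions: the role bits alone are the live docs
        } else {
            return new CombinedBitSet(roleQueryBits, actualLiveDocs); // intersection of both
        }
    }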
@@ -23,7 +23,6 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.CollectionStatistics;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.Weight;
@@ -34,6 +33,7 @@ import org.apache.lucene.util.SetOnce.AlreadySetException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@@ -50,14 +50,12 @@ import org.elasticsearch.index.cache.query.DisabledQueryCache;
 import org.elasticsearch.index.cache.query.IndexQueryCache;
 import org.elasticsearch.index.cache.query.QueryCache;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.engine.EngineException;
 import org.elasticsearch.index.engine.InternalEngineFactory;
 import org.elasticsearch.index.engine.InternalEngineTests;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.shard.IndexEventListener;
-import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.IndexingOperationListener;
 import org.elasticsearch.index.shard.SearchOperationListener;
 import org.elasticsearch.index.shard.ShardId;
@@ -159,10 +157,10 @@ public class IndexModuleTests extends ESTestCase {
     public void testWrapperIsBound() throws IOException {
         final MockEngineFactory engineFactory = new MockEngineFactory(AssertingDirectoryReader.class);
         IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, engineFactory, Collections.emptyMap());
-        module.setSearcherWrapper((s) -> new Wrapper());
+        module.setReaderWrapper(s -> new Wrapper());

         IndexService indexService = newIndexService(module);
-        assertTrue(indexService.getSearcherWrapper() instanceof Wrapper);
+        assertTrue(indexService.getReaderWrapper() instanceof Wrapper);
         assertSame(indexService.getEngineFactory(), module.getEngineFactory());
         indexService.close("simon says", false);
     }
@@ -321,7 +319,7 @@ public class IndexModuleTests extends ESTestCase {
         assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexEventListener(null)).getMessage());
         assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexOperationListener(null)).getMessage());
         assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSimilarity(null, null)).getMessage());
-        assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.setSearcherWrapper(null)).getMessage());
+        assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.setReaderWrapper(null)).getMessage());
         assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.forceQueryCacheProvider(null)).getMessage());
     }

@@ -455,15 +453,9 @@ public class IndexModuleTests extends ESTestCase {
         }
     }

-    public static final class Wrapper extends IndexSearcherWrapper {
+    public static final class Wrapper implements CheckedFunction<DirectoryReader, DirectoryReader, IOException> {

         @Override
-        public DirectoryReader wrap(DirectoryReader reader) {
-            return null;
-        }
-
-        @Override
-        public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
+        public DirectoryReader apply(DirectoryReader reader) {
             return null;
         }
     }
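The plugin-facing surface in this test is the new IndexModule hook: a reader wrapper is now a plain CheckedFunction over the DirectoryReader rather than an IndexSearcherWrapper subclass. A minimal registration sketch, assuming an IndexModule instance is in scope and that the factory argument is the IndexService as in testWrapperIsBound above (the identity body is illustrative):

    // Sketch only: register a reader wrapper; returning the reader unchanged
    // reproduces the old "no wrapping" behaviour.
    module.setReaderWrapper(indexService ->
        (CheckedFunction<DirectoryReader, DirectoryReader, IOException>) reader -> reader);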
@@ -128,7 +128,6 @@ import org.elasticsearch.index.seqno.RetentionLease;
 import org.elasticsearch.index.seqno.RetentionLeases;
 import org.elasticsearch.index.seqno.SeqNoStats;
 import org.elasticsearch.index.seqno.SequenceNumbers;
-import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardPath;
 import org.elasticsearch.index.shard.ShardUtils;
@@ -723,36 +722,6 @@ public class InternalEngineTests extends EngineTestCase {
         }
     }

-    public void testIndexSearcherWrapper() throws Exception {
-        final AtomicInteger counter = new AtomicInteger();
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) {
-                counter.incrementAndGet();
-                return reader;
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                counter.incrementAndGet();
-                return searcher;
-            }
-        };
-        Store store = createStore();
-        Path translog = createTempDir("translog-test");
-        InternalEngine engine = createEngine(store, translog);
-        engine.close();
-
-        engine = new InternalEngine(engine.config());
-        assertTrue(engine.isRecovering());
-        engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
-        Engine.Searcher searcher = wrapper.wrap(engine.acquireSearcher("test"));
-        assertThat(counter.get(), equalTo(2));
-        searcher.close();
-        IOUtils.close(store, engine);
-    }
-
     public void testFlushIsDisabledDuringTranslogRecovery() throws IOException {
         assertFalse(engine.isRecovering());
         ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
@@ -28,32 +28,24 @@ import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TotalHitCountCollector;
-import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.engine.EngineException;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
 import java.util.Collections;
-import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;

-import static org.hamcrest.Matchers.equalTo;
-
-public class IndexSearcherWrapperTests extends ESTestCase {
+public class IndexReaderWrapperTests extends ESTestCase {

     public void testReaderCloseListenerIsCalled() throws IOException {
         Directory dir = newDirectory();
@@ -67,23 +59,13 @@ public class IndexSearcherWrapperTests extends ESTestCase {
         IndexSearcher searcher = new IndexSearcher(open);
         assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         final AtomicInteger closeCalls = new AtomicInteger(0);
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
-                return new FieldMaskingReader("field", reader, closeCalls);
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                return searcher;
-            }
-
-        };
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper =
+            reader -> new FieldMaskingReader("field", reader, closeCalls);
         final int sourceRefCount = open.getRefCount();
         final AtomicInteger count = new AtomicInteger();
         final AtomicInteger outerCount = new AtomicInteger();
         final AtomicBoolean closeCalled = new AtomicBoolean(false);
-        final Engine.Searcher wrap = wrapper.wrap(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)));
+        final Engine.Searcher wrap = IndexShard.wrapSearcher(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)), wrapper);
         assertEquals(1, wrap.reader().getRefCount());
         ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> {
             if (key == open.getReaderCacheHelper().getKey()) {
@@ -118,20 +100,11 @@ public class IndexSearcherWrapperTests extends ESTestCase {
         assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         searcher.setSimilarity(iwc.getSimilarity());
         final AtomicInteger closeCalls = new AtomicInteger(0);
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
-                return new FieldMaskingReader("field", reader, closeCalls);
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                return searcher;
-            }
-        };
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper =
+            reader -> new FieldMaskingReader("field", reader, closeCalls);
         final ConcurrentHashMap<Object, TopDocs> cache = new ConcurrentHashMap<>();
         AtomicBoolean closeCalled = new AtomicBoolean(false);
-        try (Engine.Searcher wrap = wrapper.wrap(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)))) {
+        try (Engine.Searcher wrap = IndexShard.wrapSearcher(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)), wrapper)) {
             ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> {
                 cache.remove(key);
             });
@@ -159,58 +132,14 @@ public class IndexSearcherWrapperTests extends ESTestCase {
         IndexSearcher searcher = new IndexSearcher(open);
         assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         searcher.setSimilarity(iwc.getSimilarity());
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper();
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = directoryReader -> directoryReader;
         try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, open::close)) {
-            final Engine.Searcher wrap = wrapper.wrap(engineSearcher);
+            final Engine.Searcher wrap = IndexShard.wrapSearcher(engineSearcher, wrapper);
             assertSame(wrap, engineSearcher);
         }
         IOUtils.close(writer, dir);
     }

-    public void testWrapVisibility() throws IOException {
-        Directory dir = newDirectory();
-        IndexWriterConfig iwc = newIndexWriterConfig();
-        IndexWriter writer = new IndexWriter(dir, iwc);
-        Document doc = new Document();
-        doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
-        doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
-        writer.addDocument(doc);
-        DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
-        IndexSearcher searcher = new IndexSearcher(open);
-        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
-                return reader;
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                return new IndexSearcher(searcher.getIndexReader()) {
-                    @Override
-                    protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-                        throw new IllegalStateException("boum");
-                    }
-                };
-            }
-
-        };
-        final AtomicBoolean closeCalled = new AtomicBoolean(false);
-        final Engine.Searcher wrap = wrapper.wrap(new Engine.Searcher("foo", searcher, () -> closeCalled.set(true)));
-        assertEquals(1, wrap.reader().getRefCount());
-        ContextIndexSearcher contextSearcher = new ContextIndexSearcher(wrap, wrap.searcher().getQueryCache(),
-            wrap.searcher().getQueryCachingPolicy());
-        IllegalStateException exc = expectThrows(IllegalStateException.class,
-            () -> contextSearcher.search(new TermQuery(new Term("field", "doc")), new TotalHitCountCollector()));
-        assertThat(exc.getMessage(), equalTo("boum"));
-        wrap.close();
-        assertFalse("wrapped reader is closed", wrap.reader().tryIncRef());
-        assertTrue(closeCalled.get());
-
-        IOUtils.close(open, writer, dir);
-        assertEquals(0, open.getRefCount());
-    }
-
     private static class FieldMaskingReader extends FilterDirectoryReader {
         private final String field;
         private final AtomicInteger closeCalls;
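FieldMaskingReader, used by the tests above, is a FilterDirectoryReader; its body is elided in this hunk. A hedged skeleton of the wrapper pattern it follows (the class name and comments are illustrative; the DocumentSubsetDirectoryReader added later in this commit uses the same shape):

    // Illustrative skeleton only, not the exact FieldMaskingReader body.
    private static class PassThroughReader extends FilterDirectoryReader {

        PassThroughReader(DirectoryReader in) throws IOException {
            super(in, new SubReaderWrapper() {
                @Override
                public LeafReader wrap(LeafReader reader) {
                    return reader; // a real wrapper would mask fields or filter docs here
                }
            });
        }

        @Override
        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
            return new PassThroughReader(in); // keep wrapping across reopens
        }

        @Override
        public CacheHelper getReaderCacheHelper() {
            return in.getReaderCacheHelper(); // safe to delegate when documents are unchanged
        }
    }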
|
@ -18,6 +18,7 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.shard;
|
package org.elasticsearch.index.shard;
|
||||||
|
|
||||||
|
import org.apache.lucene.index.DirectoryReader;
|
||||||
import org.apache.lucene.store.LockObtainFailedException;
|
import org.apache.lucene.store.LockObtainFailedException;
|
||||||
import org.elasticsearch.ExceptionsHelper;
|
import org.elasticsearch.ExceptionsHelper;
|
||||||
import org.elasticsearch.Version;
|
import org.elasticsearch.Version;
|
||||||
|
@ -40,6 +41,7 @@ import org.elasticsearch.cluster.routing.ShardRouting;
|
||||||
import org.elasticsearch.cluster.routing.ShardRoutingState;
|
import org.elasticsearch.cluster.routing.ShardRoutingState;
|
||||||
import org.elasticsearch.cluster.routing.UnassignedInfo;
|
import org.elasticsearch.cluster.routing.UnassignedInfo;
|
||||||
import org.elasticsearch.cluster.service.ClusterService;
|
import org.elasticsearch.cluster.service.ClusterService;
|
||||||
|
import org.elasticsearch.common.CheckedFunction;
|
||||||
import org.elasticsearch.common.CheckedRunnable;
|
import org.elasticsearch.common.CheckedRunnable;
|
||||||
import org.elasticsearch.common.Strings;
|
import org.elasticsearch.common.Strings;
|
||||||
import org.elasticsearch.common.UUIDs;
|
import org.elasticsearch.common.UUIDs;
|
||||||
|
@ -528,7 +530,7 @@ public class IndexShardIT extends ESSingleNodeTestCase {
|
||||||
client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON)
|
client().prepareIndex("test", "test", "1").setSource("{\"foo\" : \"bar\"}", XContentType.JSON)
|
||||||
.setRefreshPolicy(IMMEDIATE).get();
|
.setRefreshPolicy(IMMEDIATE).get();
|
||||||
|
|
||||||
IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {};
|
CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = directoryReader -> directoryReader;
|
||||||
shard.close("simon says", false);
|
shard.close("simon says", false);
|
||||||
AtomicReference<IndexShard> shardRef = new AtomicReference<>();
|
AtomicReference<IndexShard> shardRef = new AtomicReference<>();
|
||||||
List<Exception> failures = new ArrayList<>();
|
List<Exception> failures = new ArrayList<>();
|
||||||
|
@ -646,10 +648,10 @@ public class IndexShardIT extends ESSingleNodeTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public static final IndexShard newIndexShard(
|
public static final IndexShard newIndexShard(
|
||||||
final IndexService indexService,
|
final IndexService indexService,
|
||||||
final IndexShard shard,IndexSearcherWrapper wrapper,
|
final IndexShard shard, CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper,
|
||||||
final CircuitBreakerService cbs,
|
final CircuitBreakerService cbs,
|
||||||
final IndexingOperationListener... listeners) throws IOException {
|
final IndexingOperationListener... listeners) throws IOException {
|
||||||
ShardRouting initializingShardRouting = getInitializingShardRouting(shard.routingEntry());
|
ShardRouting initializingShardRouting = getInitializingShardRouting(shard.routingEntry());
|
||||||
return new IndexShard(
|
return new IndexShard(
|
||||||
initializingShardRouting,
|
initializingShardRouting,
|
||||||
|
|
|
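Call sites pass the reader wrapper positionally now. A hedged usage sketch for the helper above, assuming indexService, shard, and a CircuitBreakerService named cbs are in scope (names are illustrative):

    // Sketch only: an identity wrapper reproduces the old unwrapped behaviour.
    CheckedFunction<DirectoryReader, DirectoryReader, IOException> identity = reader -> reader;
    IndexShard reopened = IndexShardIT.newIndexShard(indexService, shard, identity, cbs);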
@@ -23,7 +23,6 @@ import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.AlreadyClosedException;
@@ -54,6 +53,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingHelper;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.breaker.CircuitBreaker;
@@ -82,7 +82,6 @@ import org.elasticsearch.index.engine.CommitStats;
 import org.elasticsearch.index.engine.DocIdSeqNoAndSource;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.Engine.DeleteResult;
-import org.elasticsearch.index.engine.EngineException;
 import org.elasticsearch.index.engine.EngineTestCase;
 import org.elasticsearch.index.engine.InternalEngine;
 import org.elasticsearch.index.engine.InternalEngineFactory;
@@ -2368,7 +2367,7 @@ public class IndexShardTests extends IndexShardTestCase {
         closeShards(target);
     }

-    public void testSearcherWrapperIsUsed() throws IOException {
+    public void testReaderWrapperIsUsed() throws IOException {
         IndexShard shard = newStartedShard(true);
         indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
         indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}");
@@ -2386,17 +2385,7 @@ public class IndexShardTests extends IndexShardTestCase {
             search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
             assertEquals(search.totalHits.value, 1);
         }
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
-                return new FieldMaskingReader("foo", reader);
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                return searcher;
-            }
-        };
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = reader -> new FieldMaskingReader("foo", reader);
         closeShards(shard);
         IndexShard newShard = newShard(
             ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.ExistingStoreRecoverySource.INSTANCE),
@@ -2428,18 +2417,8 @@ public class IndexShardTests extends IndexShardTestCase {
         closeShards(newShard);
     }

-    public void testSearcherWrapperWorksWithGlobalOrdinals() throws IOException {
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
-                return new FieldMaskingReader("foo", reader);
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                return searcher;
-            }
-        };
+    public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException {
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = reader -> new FieldMaskingReader("foo", reader);

         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -2540,16 +2519,8 @@ public class IndexShardTests extends IndexShardTestCase {
         IndexShard shard = newStartedShard(true);
         indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
         shard.refresh("test");
-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
-            @Override
-            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
-                throw new RuntimeException("boom");
-            }
-
-            @Override
-            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-                return searcher;
-            }
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = reader -> {
+            throw new RuntimeException("boom");
         };

         closeShards(shard);
@@ -18,17 +18,18 @@
  */
 package org.elasticsearch.indices;

+import org.apache.lucene.index.DirectoryReader;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.IndexShardIT;
 import org.elasticsearch.index.shard.IndexShardTestCase;
@@ -426,7 +427,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {
             client().prepareIndex("test", "test", Integer.toString(i)).setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get();
         }

-        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {};
+        CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = directoryReader -> directoryReader;
         shard.close("simon says", false);
         AtomicReference<IndexShard> shardRef = new AtomicReference<>();
         Settings settings = Settings.builder().put("indices.memory.index_buffer_size", "50kb").build();
@@ -0,0 +1,425 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.internal;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.CombinedBitSet;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;

import static org.elasticsearch.search.internal.ContextIndexSearcher.intersectScorerAndBitSet;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

public class ContextIndexSearcherTests extends ESTestCase {
    public void testIntersectScorerAndRoleBits() throws Exception {
        final Directory directory = newDirectory();
        IndexWriter iw = new IndexWriter(
            directory,
            new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE)
        );

        Document document = new Document();
        document.add(new StringField("field1", "value1", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        document = new Document();
        document.add(new StringField("field1", "value2", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        document = new Document();
        document.add(new StringField("field1", "value3", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        document = new Document();
        document.add(new StringField("field1", "value4", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        iw.commit();
        iw.deleteDocuments(new Term("field1", "value3"));
        iw.close();
        DirectoryReader directoryReader = DirectoryReader.open(directory);
        IndexSearcher searcher = new IndexSearcher(directoryReader);
        Weight weight = searcher.createWeight(new TermQuery(new Term("field2", "value1")),
            org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f);

        LeafReaderContext leaf = directoryReader.leaves().get(0);

        CombinedBitSet bitSet = new CombinedBitSet(query(leaf, "field1", "value1"), leaf.reader().getLiveDocs());
        LeafCollector leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assertThat(doc, equalTo(0));
            }
        };
        intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {});

        bitSet = new CombinedBitSet(query(leaf, "field1", "value2"), leaf.reader().getLiveDocs());
        leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assertThat(doc, equalTo(1));
            }
        };
        intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {});


        bitSet = new CombinedBitSet(query(leaf, "field1", "value3"), leaf.reader().getLiveDocs());
        leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                fail("docId [" + doc + "] should have been deleted");
            }
        };
        intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {});

        bitSet = new CombinedBitSet(query(leaf, "field1", "value4"), leaf.reader().getLiveDocs());
        leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assertThat(doc, equalTo(3));
            }
        };
        intersectScorerAndBitSet(weight.scorer(leaf), bitSet, leafCollector, () -> {});

        directoryReader.close();
        directory.close();
    }

    public void testContextIndexSearcherSparseNoDeletions() throws IOException {
        doTestContextIndexSearcher(true, false);
    }

    public void testContextIndexSearcherDenseNoDeletions() throws IOException {
        doTestContextIndexSearcher(false, false);
    }

    public void testContextIndexSearcherSparseWithDeletions() throws IOException {
        doTestContextIndexSearcher(true, true);
    }

    public void testContextIndexSearcherDenseWithDeletions() throws IOException {
        doTestContextIndexSearcher(false, true);
    }

    public void doTestContextIndexSearcher(boolean sparse, boolean deletions) throws IOException {
        Directory dir = newDirectory();
        IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(null));
        Document doc = new Document();
        StringField allowedField = new StringField("allowed", "yes", Field.Store.NO);
        doc.add(allowedField);
        StringField fooField = new StringField("foo", "bar", Field.Store.NO);
        doc.add(fooField);
        StringField deleteField = new StringField("delete", "no", Field.Store.NO);
        doc.add(deleteField);
        w.addDocument(doc);
        if (deletions) {
            // add a document that matches foo:bar but will be deleted
            deleteField.setStringValue("yes");
            w.addDocument(doc);
            deleteField.setStringValue("no");
        }
        allowedField.setStringValue("no");
        w.addDocument(doc);
        if (sparse) {
            for (int i = 0; i < 1000; ++i) {
                w.addDocument(doc);
            }
            w.forceMerge(1);
        }
        w.deleteDocuments(new Term("delete", "yes"));

        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
        BitsetFilterCache.Listener listener = new BitsetFilterCache.Listener() {
            @Override
            public void onCache(ShardId shardId, Accountable accountable) {

            }

            @Override
            public void onRemoval(ShardId shardId, Accountable accountable) {

            }
        };
        DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(w),
            new ShardId(settings.getIndex(), 0));
        BitsetFilterCache cache = new BitsetFilterCache(settings, listener);
        Query roleQuery = new TermQuery(new Term("allowed", "yes"));
        BitSet bitSet = cache.getBitSetProducer(roleQuery).getBitSet(reader.leaves().get(0));
        if (sparse) {
            assertThat(bitSet, instanceOf(SparseFixedBitSet.class));
        } else {
            assertThat(bitSet, instanceOf(FixedBitSet.class));
        }

        DocumentSubsetDirectoryReader filteredReader = new DocumentSubsetDirectoryReader(reader, cache, roleQuery);

        ContextIndexSearcher searcher = new ContextIndexSearcher(filteredReader, IndexSearcher.getDefaultSimilarity(),
            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
        searcher.setCheckCancelled(() -> {});

        // Searching a non-existing term will trigger a null scorer
        assertEquals(0, searcher.count(new TermQuery(new Term("non_existing_field", "non_existing_value"))));

        assertEquals(1, searcher.count(new TermQuery(new Term("foo", "bar"))));

        // make sure scorers are created only once, see #1725
        assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery())));
        IOUtils.close(reader, w, dir);
    }

    private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException {
        SparseFixedBitSet sparseFixedBitSet = new SparseFixedBitSet(leaf.reader().maxDoc());
        TermsEnum tenum = leaf.reader().terms(field).iterator();
        while (tenum.next().utf8ToString().equals(value) == false) {
        }
        PostingsEnum penum = tenum.postings(null);
        sparseFixedBitSet.or(penum);
        return sparseFixedBitSet;
    }

    private static class DocumentSubsetDirectoryReader extends FilterDirectoryReader {
        private final BitsetFilterCache bitsetFilterCache;
        private final Query roleQuery;

        DocumentSubsetDirectoryReader(DirectoryReader in, BitsetFilterCache bitsetFilterCache, Query roleQuery) throws IOException {
            super(in, new SubReaderWrapper() {
                @Override
                public LeafReader wrap(LeafReader reader) {
                    try {
                        return new DocumentSubsetReader(reader, bitsetFilterCache, roleQuery);
                    } catch (Exception e) {
                        throw ExceptionsHelper.convertToElastic(e);
                    }
                }
            });
            this.bitsetFilterCache = bitsetFilterCache;
            this.roleQuery = roleQuery;
        }

        @Override
        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
            return new DocumentSubsetDirectoryReader(in, bitsetFilterCache, roleQuery);
        }


        @Override
        public CacheHelper getReaderCacheHelper() {
            return in.getReaderCacheHelper();
        }
    }

    private static class DocumentSubsetReader extends FilterLeafReader {
        private final BitSet roleQueryBits;
        private final int numDocs;

        /**
         * <p>Construct a FilterLeafReader based on the specified base reader.
         * <p>Note that base reader is closed if this FilterLeafReader is closed.</p>
         *
         * @param in specified base reader.
         */
        DocumentSubsetReader(LeafReader in, BitsetFilterCache bitsetFilterCache, Query roleQuery) throws IOException {
            super(in);
            this.roleQueryBits = bitsetFilterCache.getBitSetProducer(roleQuery).getBitSet(in.getContext());
            this.numDocs = computeNumDocs(in, roleQueryBits);
        }

        @Override
        public CacheHelper getCoreCacheHelper() {
            return in.getCoreCacheHelper();
        }

        @Override
        public CacheHelper getReaderCacheHelper() {
            // Not delegated since we change the live docs
            return null;
        }

        @Override
        public int numDocs() {
            return numDocs;
        }

        @Override
        public Bits getLiveDocs() {
            final Bits actualLiveDocs = in.getLiveDocs();
            if (roleQueryBits == null) {
                return new Bits.MatchNoBits(in.maxDoc());
            } else if (actualLiveDocs == null) {
                return roleQueryBits;
            } else {
                // apply deletes when needed:
                return new CombinedBitSet(roleQueryBits, actualLiveDocs);
            }
        }

        private static int computeNumDocs(LeafReader reader, BitSet roleQueryBits) {
            final Bits liveDocs = reader.getLiveDocs();
            if (roleQueryBits == null) {
                return 0;
            } else if (liveDocs == null) {
                // slow
                return roleQueryBits.cardinality();
            } else {
                // very slow, but necessary in order to be correct
                int numDocs = 0;
                DocIdSetIterator it = new BitSetIterator(roleQueryBits, 0L); // we don't use the cost
                try {
                    for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                        if (liveDocs.get(doc)) {
                            numDocs++;
                        }
                    }
                    return numDocs;
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            }
        }
    }

    private static class CreateScorerOnceWeight extends Weight {

        private final Weight weight;
        private final Set<Object> seenLeaves = Collections.newSetFromMap(new IdentityHashMap<>());

        CreateScorerOnceWeight(Weight weight) {
            super(weight.getQuery());
            this.weight = weight;
        }

        @Override
        public void extractTerms(Set<Term> terms) {
            weight.extractTerms(terms);
        }

        @Override
        public Explanation explain(LeafReaderContext context, int doc) throws IOException {
            return weight.explain(context, doc);
        }

        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
            assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey()));
            return weight.scorer(context);
        }

        @Override
        public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
            assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey()));
            return weight.bulkScorer(context);
        }

        @Override
        public boolean isCacheable(LeafReaderContext ctx) {
            return true;
        }
    }

    private static class CreateScorerOnceQuery extends Query {

        private final Query query;

        CreateScorerOnceQuery(Query query) {
            this.query = query;
        }

        @Override
        public String toString(String field) {
            return query.toString(field);
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            Query queryRewritten = query.rewrite(reader);
            if (query != queryRewritten) {
                return new CreateScorerOnceQuery(queryRewritten);
            }
            return super.rewrite(reader);
        }

        @Override
        public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException {
            return new CreateScorerOnceWeight(query.createWeight(searcher, scoreMode, boost));
        }

        @Override
        public boolean equals(Object obj) {
            return sameClassAs(obj) && query.equals(((CreateScorerOnceQuery) obj).query);
        }

        @Override
        public int hashCode() {
            return 31 * classHash() + query.hashCode();
        }
    }
}
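For reference, the construction pattern these tests exercise: the ContextIndexSearcher is now built from the reader plus explicit similarity, query cache, and caching policy, instead of from an Engine.Searcher, matching the QueryProfilerTests changes below. A hedged sketch, assuming a DirectoryReader named reader is in scope:

    // Sketch only: per-request searcher built directly over the reader.
    ContextIndexSearcher searcher = new ContextIndexSearcher(
        reader,
        IndexSearcher.getDefaultSimilarity(),
        IndexSearcher.getDefaultQueryCache(),
        IndexSearcher.getDefaultQueryCachingPolicy());
    searcher.setCheckCancelled(() -> {}); // no-op cancellation check, as in the tests above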
@@ -44,7 +44,6 @@ import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.test.ESTestCase;
@@ -81,8 +80,8 @@ public class QueryProfilerTests extends ESTestCase {
         }
         reader = w.getReader();
         w.close();
-        Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), null);
-        searcher = new ContextIndexSearcher(engineSearcher, IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY);
+        searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
+            IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY);
     }

     @AfterClass
@@ -159,10 +158,10 @@ public class QueryProfilerTests extends ESTestCase {

     public void testApproximations() throws IOException {
         QueryProfiler profiler = new QueryProfiler();
-        Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), reader::close);
         // disable query caching since we want to test approximations, which won't
         // be exposed on a cached entry
-        ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY);
+        ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
+            null, MAYBE_CACHE_POLICY);
         searcher.setProfiler(profiler);
         Query query = new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random());
         searcher.count(query);
@ -18,6 +18,7 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.shard;
|
package org.elasticsearch.index.shard;
|
||||||
|
|
||||||
|
import org.apache.lucene.index.DirectoryReader;
|
||||||
import org.apache.lucene.index.IndexNotFoundException;
|
import org.apache.lucene.index.IndexNotFoundException;
|
||||||
import org.apache.lucene.store.Directory;
|
import org.apache.lucene.store.Directory;
|
||||||
import org.elasticsearch.Version;
|
import org.elasticsearch.Version;
|
||||||
|
@ -278,8 +279,9 @@ public abstract class IndexShardTestCase extends ESTestCase {
|
||||||
* (ready to recover from another shard)
|
* (ready to recover from another shard)
|
||||||
*/
|
*/
|
||||||
protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData,
|
protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData,
|
||||||
@Nullable IndexSearcherWrapper searcherWrapper) throws IOException {
|
@Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper)
|
||||||
return newShard(shardId, primary, nodeId, indexMetaData, searcherWrapper, () -> {});
|
throws IOException {
|
||||||
|
return newShard(shardId, primary, nodeId, indexMetaData, readerWrapper, () -> {});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -291,11 +293,12 @@ public abstract class IndexShardTestCase extends ESTestCase {
|
||||||
* (ready to recover from another shard)
|
* (ready to recover from another shard)
|
||||||
*/
|
*/
|
||||||
protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData,
|
protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData,
|
||||||
@Nullable IndexSearcherWrapper searcherWrapper, Runnable globalCheckpointSyncer) throws IOException {
|
@Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper,
|
||||||
|
Runnable globalCheckpointSyncer) throws IOException {
|
||||||
ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING,
|
ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING,
|
||||||
primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE);
|
primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE);
|
||||||
return newShard(
|
return newShard(
|
||||||
shardRouting, indexMetaData, searcherWrapper, new InternalEngineFactory(), globalCheckpointSyncer, RetentionLeaseSyncer.EMPTY);
|
shardRouting, indexMetaData, readerWrapper, new InternalEngineFactory(), globalCheckpointSyncer, RetentionLeaseSyncer.EMPTY);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -306,10 +309,11 @@ public abstract class IndexShardTestCase extends ESTestCase {
      * @param indexMetaData indexMetaData for the shard, including any mapping
      * @param listeners an optional set of listeners to add to the shard
      */
-    protected IndexShard newShard(
-            ShardRouting routing, IndexMetaData indexMetaData, EngineFactory engineFactory, IndexingOperationListener... listeners)
+    protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData,
+            @Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> indexReaderWrapper,
+            EngineFactory engineFactory, IndexingOperationListener... listeners)
         throws IOException {
-        return newShard(routing, indexMetaData, null, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners);
+        return newShard(routing, indexMetaData, indexReaderWrapper, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners);
     }
 
     /**
@@ -317,22 +321,20 @@ public abstract class IndexShardTestCase extends ESTestCase {
      * current node id the shard is assigned to.
      * @param routing shard routing to use
      * @param indexMetaData indexMetaData for the shard, including any mapping
-     * @param indexSearcherWrapper an optional wrapper to be used during searchers
+     * @param indexReaderWrapper an optional wrapper to be used during search
      * @param globalCheckpointSyncer callback for syncing global checkpoints
      * @param listeners an optional set of listeners to add to the shard
      */
     protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData,
-                                  @Nullable IndexSearcherWrapper indexSearcherWrapper,
-                                  @Nullable EngineFactory engineFactory,
-                                  Runnable globalCheckpointSyncer,
-                                  RetentionLeaseSyncer retentionLeaseSyncer,
-                                  IndexingOperationListener... listeners)
+                                  @Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> indexReaderWrapper,
+                                  @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, RetentionLeaseSyncer retentionLeaseSyncer,
+                                  IndexingOperationListener... listeners)
         throws IOException {
         // add node id as name to settings for proper logging
         final ShardId shardId = routing.shardId();
         final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir());
         ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);
-        return newShard(routing, shardPath, indexMetaData, null, indexSearcherWrapper, engineFactory, globalCheckpointSyncer,
+        return newShard(routing, shardPath, indexMetaData, null, indexReaderWrapper, engineFactory, globalCheckpointSyncer,
             retentionLeaseSyncer, EMPTY_EVENT_LISTENER, listeners);
     }
@@ -342,14 +344,14 @@ public abstract class IndexShardTestCase extends ESTestCase {
      * @param shardPath path to use for shard data
      * @param indexMetaData indexMetaData for the shard, including any mapping
      * @param storeProvider an optional custom store provider to use. If null a default file based store will be created
-     * @param indexSearcherWrapper an optional wrapper to be used during searchers
+     * @param indexReaderWrapper an optional wrapper to be used during search
      * @param globalCheckpointSyncer callback for syncing global checkpoints
      * @param indexEventListener index event listener
      * @param listeners an optional set of listeners to add to the shard
      */
     protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData,
                                   @Nullable CheckedFunction<IndexSettings, Store, IOException> storeProvider,
-                                  @Nullable IndexSearcherWrapper indexSearcherWrapper,
+                                  @Nullable CheckedFunction<DirectoryReader, DirectoryReader, IOException> indexReaderWrapper,
                                   @Nullable EngineFactory engineFactory,
                                   Runnable globalCheckpointSyncer, RetentionLeaseSyncer retentionLeaseSyncer,
                                   IndexEventListener indexEventListener, IndexingOperationListener... listeners) throws IOException {
@@ -382,7 +384,7 @@ public abstract class IndexShardTestCase extends ESTestCase {
                 similarityService,
                 engineFactory,
                 indexEventListener,
-                indexSearcherWrapper,
+                indexReaderWrapper,
                 threadPool,
                 BigArrays.NON_RECYCLING_INSTANCE,
                 warmer,
@@ -32,7 +32,6 @@ import org.apache.lucene.search.QueryCache;
 import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.XIndexSearcher;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.lease.Releasable;

@@ -47,7 +46,6 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener;
 import org.elasticsearch.index.cache.query.DisabledQueryCache;
-import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -240,7 +238,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
     }
 
     protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings) {
-        Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher, () -> indexSearcher.getIndexReader().close());
         QueryCache queryCache = new DisabledQueryCache(indexSettings);
         QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() {
             @Override

@@ -253,7 +250,8 @@ public abstract class AggregatorTestCase extends ESTestCase {
                 return false;
             }
         };
-        ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(searcher, queryCache, queryCachingPolicy);
+        ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(indexSearcher.getIndexReader(),
+            indexSearcher.getSimilarity(), queryCache, queryCachingPolicy);
 
         SearchContext searchContext = mock(SearchContext.class);
         when(searchContext.numberOfShards()).thenReturn(1);
@@ -464,16 +462,8 @@ public abstract class AggregatorTestCase extends ESTestCase {
      */
     protected static IndexSearcher newIndexSearcher(IndexReader indexReader) {
         if (randomBoolean()) {
-            final IndexSearcher delegate = new IndexSearcher(indexReader);
-            final XIndexSearcher wrappedSearcher = new XIndexSearcher(delegate);
             // this executes basic query checks and asserts that weights are normalized only once etc.
-            return new AssertingIndexSearcher(random(), indexReader) {
-                @Override
-                protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-                    // we cannot use the asserting searcher because the weight is created by the ContextIndexSearcher
-                    wrappedSearcher.search(leaves, weight, collector);
-                }
-            };
+            return new AssertingIndexSearcher(random(), indexReader);
         } else {
             return new IndexSearcher(indexReader);
         }
@@ -24,14 +24,9 @@ import org.apache.lucene.index.AssertingDirectoryReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.AssertingIndexSearcher;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.QueryCache;
 import org.apache.lucene.search.QueryCachingPolicy;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.XIndexSearcher;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.settings.Setting;

@@ -47,7 +42,6 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.util.IdentityHashMap;
-import java.util.List;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -151,19 +145,8 @@ public final class MockEngineSupport {
         if (reader instanceof DirectoryReader && mockContext.wrapReader) {
             wrappedReader = wrapReader((DirectoryReader) reader);
         }
-        final IndexSearcher delegate = new IndexSearcher(wrappedReader);
-        delegate.setSimilarity(searcher.searcher().getSimilarity());
-        delegate.setQueryCache(filterCache);
-        delegate.setQueryCachingPolicy(filterCachingPolicy);
-        final XIndexSearcher wrappedSearcher = new XIndexSearcher(delegate);
         // this executes basic query checks and asserts that weights are normalized only once etc.
-        final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader) {
-            @Override
-            protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-                // we cannot use the asserting searcher because the weight is created by the ContextIndexSearcher
-                wrappedSearcher.search(leaves, weight, collector);
-            }
-        };
+        final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader);
         assertingIndexSearcher.setSimilarity(searcher.searcher().getSimilarity());
         assertingIndexSearcher.setQueryCache(filterCache);
         assertingIndexSearcher.setQueryCachingPolicy(filterCachingPolicy);
@@ -272,8 +272,8 @@ public final class FrozenEngine extends ReadOnlyEngine {
 
     @Override
     public void validateSearchContext(SearchContext context, TransportRequest transportRequest) {
-        Searcher engineSearcher = context.searcher().getEngineSearcher();
-        LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(engineSearcher.getDirectoryReader());
+        DirectoryReader dirReader = context.searcher().getDirectoryReader();
+        LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(dirReader);
         if (lazyDirectoryReader != null) {
             try {
                 lazyDirectoryReader.reset();

@@ -297,8 +297,8 @@ public final class FrozenEngine extends ReadOnlyEngine {
 
     @Override
     public void onNewContext(SearchContext context) {
-        Searcher engineSearcher = context.searcher().getEngineSearcher();
-        LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(engineSearcher.getDirectoryReader());
+        DirectoryReader dirReader = context.searcher().getDirectoryReader();
+        LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(dirReader);
         if (lazyDirectoryReader != null) {
             registerRelease(context, lazyDirectoryReader);
         }
@@ -16,6 +16,7 @@ import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.BitSetIterator;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.CombinedBitSet;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
@@ -172,18 +173,7 @@ public final class DocumentSubsetReader extends FilterLeafReader {
             return roleQueryBits;
         } else {
             // apply deletes when needed:
-            return new Bits() {
-
-                @Override
-                public boolean get(int index) {
-                    return roleQueryBits.get(index) && actualLiveDocs.get(index);
-                }
-
-                @Override
-                public int length() {
-                    return roleQueryBits.length();
-                }
-            };
+            return new CombinedBitSet(roleQueryBits, actualLiveDocs);
         }
     }

@@ -208,13 +198,4 @@ public final class DocumentSubsetReader extends FilterLeafReader {
         // Not delegated since we change the live docs
         return null;
     }
-
-    BitSet getRoleQueryBits() {
-        return roleQueryBits;
-    }
-
-    Bits getWrappedLiveDocs() {
-        return in.getLiveDocs();
-    }
-
 }
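The two hunks above replace a hand-rolled Bits intersection with Lucene's CombinedBitSet and drop the package-private accessors the old searcher wrapper needed. A minimal sketch of the semantics being delegated, assuming a role-query BitSet and the wrapped reader's live docs; the class name is illustrative, not the Lucene API:

    import org.apache.lucene.util.BitSet;
    import org.apache.lucene.util.Bits;

    // A doc survives only if it matches the role query AND is live in the wrapped reader.
    final class IntersectionBits implements Bits {
        private final BitSet roleQueryBits; // docs matching the role query
        private final Bits liveDocs;        // live docs of the wrapped reader

        IntersectionBits(BitSet roleQueryBits, Bits liveDocs) {
            this.roleQueryBits = roleQueryBits;
            this.liveDocs = liveDocs;
        }

        @Override
        public boolean get(int index) {
            return roleQueryBits.get(index) && liveDocs.get(index);
        }

        @Override
        public int length() {
            return roleQueryBits.length();
        }
    }

Unlike this plain Bits view, CombinedBitSet exposes BitSet semantics over the intersection, which is what lets the new ContextIndexSearcher detect a sparse result and iterate set bits instead of probing every document.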
@@ -0,0 +1,111 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.accesscontrol;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.logging.LoggerMessageFormat;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardUtils;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField;
+import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions;
+import org.elasticsearch.xpack.core.security.support.Exceptions;
+import org.elasticsearch.xpack.core.security.user.User;
+
+import java.io.IOException;
+import java.util.function.Function;
+
+/**
+ * An IndexReader wrapper implementation that is used for field and document level security.
+ * <p>
+ * Based on the {@link ThreadContext} this class will enable field and/or document level security.
+ * <p>
+ * Field level security is enabled by wrapping the original {@link DirectoryReader} in a {@link FieldSubsetReader}
+ * in the {@link #apply(DirectoryReader)} method.
+ * <p>
+ * Document level security is enabled by wrapping the original {@link DirectoryReader} in a {@link DocumentSubsetReader}
+ * instance.
+ */
+public class SecurityIndexReaderWrapper implements CheckedFunction<DirectoryReader, DirectoryReader, IOException> {
+    private static final Logger logger = LogManager.getLogger(SecurityIndexReaderWrapper.class);
+
+    private final Function<ShardId, QueryShardContext> queryShardContextProvider;
+    private final BitsetFilterCache bitsetFilterCache;
+    private final XPackLicenseState licenseState;
+    private final ThreadContext threadContext;
+    private final ScriptService scriptService;
+
+    public SecurityIndexReaderWrapper(Function<ShardId, QueryShardContext> queryShardContextProvider,
+                                      BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState,
+                                      ScriptService scriptService) {
+        this.scriptService = scriptService;
+        this.queryShardContextProvider = queryShardContextProvider;
+        this.bitsetFilterCache = bitsetFilterCache;
+        this.threadContext = threadContext;
+        this.licenseState = licenseState;
+    }
+
+    @Override
+    public DirectoryReader apply(final DirectoryReader reader) {
+        if (licenseState.isDocumentAndFieldLevelSecurityAllowed() == false) {
+            return reader;
+        }
+
+        try {
+            final IndicesAccessControl indicesAccessControl = getIndicesAccessControl();
+
+            ShardId shardId = ShardUtils.extractShardId(reader);
+            if (shardId == null) {
+                throw new IllegalStateException(LoggerMessageFormat.format("couldn't extract shardId from reader [{}]", reader));
+            }
+
+            final IndicesAccessControl.IndexAccessControl permissions = indicesAccessControl.getIndexPermissions(shardId.getIndexName());
+            // No permissions have been defined for an index, so don't intercept the index reader for access control
+            if (permissions == null) {
+                return reader;
+            }
+
+            DirectoryReader wrappedReader = reader;
+            DocumentPermissions documentPermissions = permissions.getDocumentPermissions();
+            if (documentPermissions != null && documentPermissions.hasDocumentLevelPermissions()) {
+                BooleanQuery filterQuery = documentPermissions.filter(getUser(), scriptService, shardId, queryShardContextProvider);
+                if (filterQuery != null) {
+                    wrappedReader = DocumentSubsetReader.wrap(wrappedReader, bitsetFilterCache, new ConstantScoreQuery(filterQuery));
+                }
+            }
+
+            return permissions.getFieldPermissions().filter(wrappedReader);
+        } catch (IOException e) {
+            logger.error("Unable to apply field level security");
+            throw ExceptionsHelper.convertToElastic(e);
+        }
+    }
+
+    protected IndicesAccessControl getIndicesAccessControl() {
+        IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY);
+        if (indicesAccessControl == null) {
+            throw Exceptions.authorizationError("no indices permissions found");
+        }
+        return indicesAccessControl;
+    }
+
+    protected User getUser(){
+        Authentication authentication = Authentication.getAuthentication(threadContext);
+        return authentication.getUser();
+    }
+}
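Since the wrapper above is just a CheckedFunction over the DirectoryReader, a plugin can now supply document- or field-hiding behaviour without touching the IndexSearcher at all. A minimal sketch of such a wrapper, assuming only the CheckedFunction contract shown in the diff; the class and method names are illustrative, not the actual plugin API:

    import java.io.IOException;

    import org.apache.lucene.index.DirectoryReader;
    import org.elasticsearch.common.CheckedFunction;

    public class ExampleReaderWrapper {
        // Returns the function a plugin would hand to the index module.
        public CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper() {
            return reader -> {
                // Inspect per-request state here and return either the original reader
                // or a FilterDirectoryReader subclass that hides documents or fields.
                return reader;
            };
        }
    }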
@@ -1,217 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.core.security.authz.accesscontrol;
-
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.BulkScorer;
-import org.apache.lucene.search.CollectionTerminatedException;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.ConjunctionDISI;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LeafCollector;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.BitSetIterator;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.SparseFixedBitSet;
-import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.common.logging.LoggerMessageFormat;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
-import org.elasticsearch.index.engine.EngineException;
-import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.shard.IndexSearcherWrapper;
-import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.index.shard.ShardUtils;
-import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.xpack.core.security.authc.Authentication;
-import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField;
-import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
-import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions;
-import org.elasticsearch.xpack.core.security.support.Exceptions;
-import org.elasticsearch.xpack.core.security.user.User;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.function.Function;
-
-/**
- * An {@link IndexSearcherWrapper} implementation that is used for field and document level security.
- * <p>
- * Based on the {@link ThreadContext} this class will enable field and/or document level security.
- * <p>
- * Field level security is enabled by wrapping the original {@link DirectoryReader} in a {@link FieldSubsetReader}
- * in the {@link #wrap(DirectoryReader)} method.
- * <p>
- * Document level security is enabled by wrapping the original {@link DirectoryReader} in a {@link DocumentSubsetReader}
- * instance.
- */
-public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
-    private static final Logger logger = LogManager.getLogger(SecurityIndexSearcherWrapper.class);
-
-    private final Function<ShardId, QueryShardContext> queryShardContextProvider;
-    private final BitsetFilterCache bitsetFilterCache;
-    private final XPackLicenseState licenseState;
-    private final ThreadContext threadContext;
-    private final ScriptService scriptService;
-
-    public SecurityIndexSearcherWrapper(Function<ShardId, QueryShardContext> queryShardContextProvider,
-                                        BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState,
-                                        ScriptService scriptService) {
-        this.scriptService = scriptService;
-        this.queryShardContextProvider = queryShardContextProvider;
-        this.bitsetFilterCache = bitsetFilterCache;
-        this.threadContext = threadContext;
-        this.licenseState = licenseState;
-    }
-
-    @Override
-    protected DirectoryReader wrap(final DirectoryReader reader) {
-        if (licenseState.isDocumentAndFieldLevelSecurityAllowed() == false) {
-            return reader;
-        }
-
-        try {
-            final IndicesAccessControl indicesAccessControl = getIndicesAccessControl();
-
-            ShardId shardId = ShardUtils.extractShardId(reader);
-            if (shardId == null) {
-                throw new IllegalStateException(LoggerMessageFormat.format("couldn't extract shardId from reader [{}]", reader));
-            }
-
-            final IndicesAccessControl.IndexAccessControl permissions = indicesAccessControl.getIndexPermissions(shardId.getIndexName());
-            // No permissions have been defined for an index, so don't intercept the index reader for access control
-            if (permissions == null) {
-                return reader;
-            }
-
-            DirectoryReader wrappedReader = reader;
-            DocumentPermissions documentPermissions = permissions.getDocumentPermissions();
-            if (documentPermissions != null && documentPermissions.hasDocumentLevelPermissions()) {
-                BooleanQuery filterQuery = documentPermissions.filter(getUser(), scriptService, shardId, queryShardContextProvider);
-                if (filterQuery != null) {
-                    wrappedReader = DocumentSubsetReader.wrap(wrappedReader, bitsetFilterCache, new ConstantScoreQuery(filterQuery));
-                }
-            }
-
-            return permissions.getFieldPermissions().filter(wrappedReader);
-        } catch (IOException e) {
-            logger.error("Unable to apply field level security");
-            throw ExceptionsHelper.convertToElastic(e);
-        }
-    }
-
-    @Override
-    protected IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
-        if (licenseState.isDocumentAndFieldLevelSecurityAllowed() == false) {
-            return searcher;
-        }
-
-        final DirectoryReader directoryReader = (DirectoryReader) searcher.getIndexReader();
-        if (directoryReader instanceof DocumentSubsetDirectoryReader) {
-            // The reasons why we return a custom searcher:
-            // 1) in the case the role query is sparse then large part of the main query can be skipped
-            // 2) If the role query doesn't match with any docs in a segment, that a segment can be skipped
-            IndexSearcher searcherWrapper = new IndexSearcherWrapper((DocumentSubsetDirectoryReader) directoryReader);
-            searcherWrapper.setQueryCache(searcher.getQueryCache());
-            searcherWrapper.setQueryCachingPolicy(searcher.getQueryCachingPolicy());
-            searcherWrapper.setSimilarity(searcher.getSimilarity());
-            return searcherWrapper;
-        }
-        return searcher;
-    }
-
-    static class IndexSearcherWrapper extends IndexSearcher {
-
-        IndexSearcherWrapper(DocumentSubsetDirectoryReader r) {
-            super(r);
-        }
-
-        @Override
-        protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-            for (LeafReaderContext ctx : leaves) { // search each subreader
-                final LeafCollector leafCollector;
-                try {
-                    leafCollector = collector.getLeafCollector(ctx);
-                } catch (CollectionTerminatedException e) {
-                    // there is no doc of interest in this reader context
-                    // continue with the following leaf
-                    continue;
-                }
-                // The reader is always of type DocumentSubsetReader when we get here:
-                DocumentSubsetReader reader = (DocumentSubsetReader) ctx.reader();
-
-                BitSet roleQueryBits = reader.getRoleQueryBits();
-                if (roleQueryBits == null) {
-                    // nothing matches with the role query, so skip this segment:
-                    continue;
-                }
-
-                // if the role query result set is sparse then we should use the SparseFixedBitSet for advancing:
-                if (roleQueryBits instanceof SparseFixedBitSet) {
-                    Scorer scorer = weight.scorer(ctx);
-                    if (scorer != null) {
-                        SparseFixedBitSet sparseFixedBitSet = (SparseFixedBitSet) roleQueryBits;
-                        Bits realLiveDocs = reader.getWrappedLiveDocs();
-                        try {
-                            intersectScorerAndRoleBits(scorer, sparseFixedBitSet, leafCollector, realLiveDocs);
-                        } catch (CollectionTerminatedException e) {
-                            // collection was terminated prematurely
-                            // continue with the following leaf
-                        }
-                    }
-                } else {
-                    BulkScorer bulkScorer = weight.bulkScorer(ctx);
-                    if (bulkScorer != null) {
-                        Bits liveDocs = reader.getLiveDocs();
-                        try {
-                            bulkScorer.score(leafCollector, liveDocs);
-                        } catch (CollectionTerminatedException e) {
-                            // collection was terminated prematurely
-                            // continue with the following leaf
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    static void intersectScorerAndRoleBits(Scorer scorer, SparseFixedBitSet roleBits, LeafCollector collector, Bits acceptDocs) throws
-        IOException {
-        // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should
-        // be used first:
-        DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(new BitSetIterator(roleBits,
-            roleBits.approximateCardinality()), scorer.iterator()));
-        for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
-            if (acceptDocs == null || acceptDocs.get(docId)) {
-                collector.collect(docId);
-            }
-        }
-    }
-
-    protected IndicesAccessControl getIndicesAccessControl() {
-        IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY);
-        if (indicesAccessControl == null) {
-            throw Exceptions.authorizationError("no indices permissions found");
-        }
-        return indicesAccessControl;
-    }
-
-    protected User getUser(){
-        Authentication authentication = Authentication.getAuthentication(threadContext);
-        return authentication.getUser();
-    }
-}
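The custom IndexSearcher deleted above is the optimization the commit message says now lives in ContextIndexSearcher: with DocumentSubsetReader installing the role-query bits as the leaf's live docs, the searcher itself can pick the collection strategy. A minimal sketch of that per-leaf decision, assuming the Lucene 8 era APIs this diff already uses (BitSetIterator, ConjunctionDISI); this is an illustration, not the actual ContextIndexSearcher code:

    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.BulkScorer;
    import org.apache.lucene.search.ConjunctionDISI;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.LeafCollector;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;
    import org.apache.lucene.util.BitSetIterator;
    import org.apache.lucene.util.Bits;
    import org.apache.lucene.util.SparseFixedBitSet;

    final class SparseLiveDocsSearch {
        // Sketch: choose the collection strategy based on how the live docs are backed.
        static void searchLeaf(LeafReaderContext ctx, Weight weight, LeafCollector collector) throws IOException {
            Bits liveDocs = ctx.reader().getLiveDocs();
            if (liveDocs instanceof SparseFixedBitSet) {
                // Sparse live docs: lead with the cheap bit-set iterator so the
                // conjunction skips the query over whole deleted/filtered regions.
                Scorer scorer = weight.scorer(ctx);
                if (scorer != null) {
                    SparseFixedBitSet sparse = (SparseFixedBitSet) liveDocs;
                    DocIdSetIterator it = ConjunctionDISI.intersectIterators(Arrays.asList(
                        new BitSetIterator(sparse, sparse.approximateCardinality()), scorer.iterator()));
                    for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                        collector.collect(doc);
                    }
                }
            } else {
                // Dense or absent live docs: let the bulk scorer check liveness per match.
                BulkScorer bulkScorer = weight.bulkScorer(ctx);
                if (bulkScorer != null) {
                    bulkScorer.score(collector, liveDocs);
                }
            }
        }
    }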
@@ -83,7 +83,7 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase {
             .primaryTerm(0, primaryTerm)
             .putMapping("_doc",
                 "{\"_source\":{\"enabled\": false}}").build();
-        IndexShard shard = newShard(shardRouting, metaData, new InternalEngineFactory());
+        IndexShard shard = newShard(shardRouting, metaData, null, new InternalEngineFactory());
         recoverShardFromStore(shard);
 
         for (int i = 0; i < 1; i++) {

@@ -278,7 +278,7 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase {
             .settings(settings)
             .primaryTerm(0, primaryTerm);
         metaData.putMapping(mapping);
-        IndexShard targetShard = newShard(targetShardRouting, metaData.build(), new InternalEngineFactory());
+        IndexShard targetShard = newShard(targetShardRouting, metaData.build(), null, new InternalEngineFactory());
         boolean success = false;
         try {
             recoverShardFromStore(targetShard);
@@ -39,6 +39,7 @@ import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.test.AbstractBuilderTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.xpack.core.security.authc.Authentication;

@@ -62,7 +63,7 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
 
-public class SecurityIndexSearcherWrapperIntegrationTests extends AbstractBuilderTestCase {
+public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderTestCase {
 
     public void testDLS() throws Exception {
         ShardId shardId = new ShardId("_index", "_na_", 0);

@@ -99,7 +100,7 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends AbstractBuilderTestCase {
         });
         XPackLicenseState licenseState = mock(XPackLicenseState.class);
         when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
-        SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(s -> queryShardContext,
+        SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
             bitsetFilterCache, threadContext, licenseState, scriptService) {
 
             @Override

@@ -156,8 +157,9 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends AbstractBuilderTestCase {
         ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i])));
         doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext).parseInnerQueryBuilder(any(XContentParser.class));
         when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
-        DirectoryReader wrappedDirectoryReader = wrapper.wrap(directoryReader);
-        IndexSearcher indexSearcher = wrapper.wrap(new IndexSearcher(wrappedDirectoryReader));
+        DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
+        IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
+            IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
 
         int expectedHitCount = valuesHitCount[i];
         logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount);

@@ -222,7 +224,7 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends AbstractBuilderTestCase {
 
         XPackLicenseState licenseState = mock(XPackLicenseState.class);
         when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
-        SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(s -> queryShardContext,
+        SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
             bitsetFilterCache, threadContext, licenseState, scriptService) {
 
             @Override

@@ -259,8 +261,9 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends AbstractBuilderTestCase {
         iw.close();
 
         DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId);
-        DirectoryReader wrappedDirectoryReader = wrapper.wrap(directoryReader);
-        IndexSearcher indexSearcher = wrapper.wrap(new IndexSearcher(wrappedDirectoryReader));
+        DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
+        IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
+            IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
 
         ScoreDoc[] hits = indexSearcher.search(new MatchAllDocsQuery(), 1000).scoreDocs;
         Set<Integer> actualDocIds = new HashSet<>();
@@ -0,0 +1,225 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.accesscontrol;
+
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MMapDirectory;
+import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.SeqNoFieldMapper;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions;
+import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
+import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import static java.util.Collections.singletonMap;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class SecurityIndexReaderWrapperUnitTests extends ESTestCase {
+
+    private static final Set<String> META_FIELDS;
+    static {
+        final Set<String> metaFields = new HashSet<>(Arrays.asList(MapperService.getAllMetaFields()));
+        metaFields.add(SourceFieldMapper.NAME);
+        metaFields.add(FieldNamesFieldMapper.NAME);
+        metaFields.add(SeqNoFieldMapper.NAME);
+        META_FIELDS = Collections.unmodifiableSet(metaFields);
+    }
+
+    private ThreadContext threadContext;
+    private ScriptService scriptService;
+    private SecurityIndexReaderWrapper securityIndexReaderWrapper;
+    private ElasticsearchDirectoryReader esIn;
+    private XPackLicenseState licenseState;
+
+    @Before
+    public void setup() throws Exception {
+        Index index = new Index("_index", "testUUID");
+        scriptService = mock(ScriptService.class);
+
+        ShardId shardId = new ShardId(index, 0);
+        licenseState = mock(XPackLicenseState.class);
+        when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
+        threadContext = new ThreadContext(Settings.EMPTY);
+        IndexShard indexShard = mock(IndexShard.class);
+        when(indexShard.shardId()).thenReturn(shardId);
+
+        Directory directory = new MMapDirectory(createTempDir());
+        IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig());
+        writer.close();
+
+        DirectoryReader in = DirectoryReader.open(directory); // unfortunately DirectoryReader isn't mock friendly
+        esIn = ElasticsearchDirectoryReader.wrap(in, shardId);
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        super.tearDown();
+        esIn.close();
+    }
+
+    public void testDefaultMetaFields() throws Exception {
+        securityIndexReaderWrapper =
+            new SecurityIndexReaderWrapper(null, null, threadContext, licenseState, scriptService) {
+                @Override
+                protected IndicesAccessControl getIndicesAccessControl() {
+                    IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true,
+                        new FieldPermissions(fieldPermissionDef(new String[]{}, null)), DocumentPermissions.allowAll());
+                    return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
+                }
+            };
+
+        FieldSubsetReader.FieldSubsetDirectoryReader result =
+            (FieldSubsetReader.FieldSubsetDirectoryReader) securityIndexReaderWrapper.apply(esIn);
+        assertThat(result.getFilter().run("_uid"), is(true));
+        assertThat(result.getFilter().run("_id"), is(true));
+        assertThat(result.getFilter().run("_version"), is(true));
+        assertThat(result.getFilter().run("_type"), is(true));
+        assertThat(result.getFilter().run("_source"), is(true));
+        assertThat(result.getFilter().run("_routing"), is(true));
+        assertThat(result.getFilter().run("_timestamp"), is(true));
+        assertThat(result.getFilter().run("_ttl"), is(true));
+        assertThat(result.getFilter().run("_size"), is(true));
+        assertThat(result.getFilter().run("_index"), is(true));
+        assertThat(result.getFilter().run("_field_names"), is(true));
+        assertThat(result.getFilter().run("_seq_no"), is(true));
+        assertThat(result.getFilter().run("_some_random_meta_field"), is(true));
+        assertThat(result.getFilter().run("some_random_regular_field"), is(false));
+    }
+
+    public void testWrapReaderWhenFeatureDisabled() throws Exception {
+        when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false);
+        securityIndexReaderWrapper =
+            new SecurityIndexReaderWrapper(null, null, threadContext, licenseState, scriptService);
+        DirectoryReader reader = securityIndexReaderWrapper.apply(esIn);
+        assertThat(reader, sameInstance(esIn));
+    }
+
+    public void testWildcards() throws Exception {
+        Set<String> expected = new HashSet<>(META_FIELDS);
+        expected.add("field1_a");
+        expected.add("field1_b");
+        expected.add("field1_c");
+        assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"field1*"}, null)), expected, "field", "field2");
+    }
+
+    public void testDotNotion() throws Exception {
+        Set<String> expected = new HashSet<>(META_FIELDS);
+        expected.add("foo.bar");
+        assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.bar"}, null)), expected, "foo", "foo.baz", "bar.foo");
+
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("foo.bar");
+        assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.*"}, null)), expected, "foo", "bar");
+    }
+
+    private void assertResolved(FieldPermissions permissions, Set<String> expected, String... fieldsToTest) {
+        for (String field : expected) {
+            assertThat(field, permissions.grantsAccessTo(field), is(true));
+        }
+        for (String field : fieldsToTest) {
+            assertThat(field, permissions.grantsAccessTo(field), is(expected.contains(field)));
+        }
+    }
+
+    public void testFieldPermissionsWithFieldExceptions() throws Exception {
+        securityIndexReaderWrapper =
+            new SecurityIndexReaderWrapper(null, null, threadContext, licenseState, null);
+        String[] grantedFields = new String[]{};
+        String[] deniedFields;
+        Set<String> expected = new HashSet<>(META_FIELDS);
+        // Presence of fields in a role with an empty array implies access to no fields except the meta fields
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})),
+            expected, "foo", "bar");
+
+        // make sure meta fields cannot be denied access to
+        deniedFields = META_FIELDS.toArray(new String[0]);
+        assertResolved(new FieldPermissions(fieldPermissionDef(null, deniedFields)),
+            new HashSet<>(Arrays.asList("foo", "bar", "_some_plugin_meta_field")));
+
+        // check we can add all fields with *
+        grantedFields = new String[]{"*"};
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("foo");
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected);
+
+        // same with null
+        grantedFields = null;
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected);
+
+        // check we remove only excluded fields
+        grantedFields = new String[]{"*"};
+        deniedFields = new String[]{"xfield"};
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("foo");
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield");
+
+        // same with null
+        grantedFields = null;
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield");
+
+        // some other checks
+        grantedFields = new String[]{"field*"};
+        deniedFields = new String[]{"field1", "field2"};
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("field3");
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2");
+
+        grantedFields = new String[]{"field1", "field2"};
+        deniedFields = new String[]{"field2"};
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("field1");
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2");
+
+        grantedFields = new String[]{"field*"};
+        deniedFields = new String[]{"field2"};
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("field1");
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field2");
+
+        deniedFields = new String[]{"field*"};
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)),
+            META_FIELDS, "field1", "field2");
+
+        // empty array for allowed fields always means no field is allowed
+        grantedFields = new String[]{};
+        deniedFields = new String[]{};
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)),
+            META_FIELDS, "field1", "field2");
+
+        // make sure all field can be explicitly allowed
+        grantedFields = new String[]{"*"};
+        deniedFields = randomBoolean() ? null : new String[]{};
+        expected = new HashSet<>(META_FIELDS);
+        expected.add("field1");
+        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected);
+    }
+
+    private static FieldPermissionsDefinition fieldPermissionDef(String[] granted, String[] denied) {
+        return new FieldPermissionsDefinition(granted, denied);
+    }
+}
@ -1,561 +0,0 @@
|
||||||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.security.authz.accesscontrol;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.misc.SweetSpotSimilarity;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions;
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Set;

import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapper.intersectScorerAndRoleBits;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {

    private static final Set<String> META_FIELDS;
    static {
        final Set<String> metaFields = new HashSet<>(Arrays.asList(MapperService.getAllMetaFields()));
        metaFields.add(SourceFieldMapper.NAME);
        metaFields.add(FieldNamesFieldMapper.NAME);
        metaFields.add(SeqNoFieldMapper.NAME);
        META_FIELDS = Collections.unmodifiableSet(metaFields);
    }

    private ThreadContext threadContext;
    private ScriptService scriptService;
    private SecurityIndexSearcherWrapper securityIndexSearcherWrapper;
    private ElasticsearchDirectoryReader esIn;
    private XPackLicenseState licenseState;
    private IndexSettings indexSettings;

    @Before
    public void setup() throws Exception {
        Index index = new Index("_index", "testUUID");
        scriptService = mock(ScriptService.class);
        indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY);

        ShardId shardId = new ShardId(index, 0);
        licenseState = mock(XPackLicenseState.class);
        when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
        threadContext = new ThreadContext(Settings.EMPTY);
        IndexShard indexShard = mock(IndexShard.class);
        when(indexShard.shardId()).thenReturn(shardId);

        Directory directory = new MMapDirectory(createTempDir());
        IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig());
        writer.close();

        DirectoryReader in = DirectoryReader.open(directory); // unfortunately DirectoryReader isn't mock friendly
        esIn = ElasticsearchDirectoryReader.wrap(in, shardId);
    }

    @After
    public void tearDown() throws Exception {
        super.tearDown();
        esIn.close();
    }
    public void testDefaultMetaFields() throws Exception {
        securityIndexSearcherWrapper =
            new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService) {
                @Override
                protected IndicesAccessControl getIndicesAccessControl() {
                    IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true,
                        new FieldPermissions(fieldPermissionDef(new String[]{}, null)), DocumentPermissions.allowAll());
                    return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
                }
            };

        FieldSubsetReader.FieldSubsetDirectoryReader result =
            (FieldSubsetReader.FieldSubsetDirectoryReader) securityIndexSearcherWrapper.wrap(esIn);
        assertThat(result.getFilter().run("_uid"), is(true));
        assertThat(result.getFilter().run("_id"), is(true));
        assertThat(result.getFilter().run("_version"), is(true));
        assertThat(result.getFilter().run("_type"), is(true));
        assertThat(result.getFilter().run("_source"), is(true));
        assertThat(result.getFilter().run("_routing"), is(true));
        assertThat(result.getFilter().run("_timestamp"), is(true));
        assertThat(result.getFilter().run("_ttl"), is(true));
        assertThat(result.getFilter().run("_size"), is(true));
        assertThat(result.getFilter().run("_index"), is(true));
        assertThat(result.getFilter().run("_field_names"), is(true));
        assertThat(result.getFilter().run("_seq_no"), is(true));
        assertThat(result.getFilter().run("_some_random_meta_field"), is(true));
        assertThat(result.getFilter().run("some_random_regular_field"), is(false));
    }
    public void testWrapReaderWhenFeatureDisabled() throws Exception {
        when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false);
        securityIndexSearcherWrapper =
            new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService);
        DirectoryReader reader = securityIndexSearcherWrapper.wrap(esIn);
        assertThat(reader, sameInstance(esIn));
    }

    public void testWrapSearcherWhenFeatureDisabled() throws Exception {
        securityIndexSearcherWrapper =
            new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService);
        IndexSearcher indexSearcher = new IndexSearcher(esIn);
        IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher);
        assertThat(result, sameInstance(indexSearcher));
    }
    public void testWildcards() throws Exception {
        Set<String> expected = new HashSet<>(META_FIELDS);
        expected.add("field1_a");
        expected.add("field1_b");
        expected.add("field1_c");
        assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"field1*"}, null)), expected, "field", "field2");
    }

    public void testDotNotion() throws Exception {
        Set<String> expected = new HashSet<>(META_FIELDS);
        expected.add("foo.bar");
        assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.bar"}, null)), expected, "foo", "foo.baz", "bar.foo");

        expected = new HashSet<>(META_FIELDS);
        expected.add("foo.bar");
        assertResolved(new FieldPermissions(fieldPermissionDef(new String[] {"foo.*"}, null)), expected, "foo", "bar");
    }
    public void testDelegateSimilarity() throws Exception {
        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
            @Override
            public void onCache(ShardId shardId, Accountable accountable) {
            }

            @Override
            public void onRemoval(ShardId shardId, Accountable accountable) {
            }
        });
        DirectoryReader directoryReader = DocumentSubsetReader.wrap(esIn, bitsetFilterCache, new MatchAllDocsQuery());
        IndexSearcher indexSearcher = new IndexSearcher(directoryReader);
        indexSearcher.setSimilarity(new SweetSpotSimilarity());
        indexSearcher.setQueryCachingPolicy(new QueryCachingPolicy() {
            @Override
            public void onUse(Query query) {
            }

            @Override
            public boolean shouldCache(Query query) {
                return false;
            }
        });
        indexSearcher.setQueryCache((weight, policy) -> weight);
        securityIndexSearcherWrapper =
            new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService);
        IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher);
        assertThat(result, not(sameInstance(indexSearcher)));
        assertThat(result.getSimilarity(), sameInstance(indexSearcher.getSimilarity()));
        assertThat(result.getQueryCachingPolicy(), sameInstance(indexSearcher.getQueryCachingPolicy()));
        assertThat(result.getQueryCache(), sameInstance(indexSearcher.getQueryCache()));
        bitsetFilterCache.close();
    }
    public void testIntersectScorerAndRoleBits() throws Exception {
        securityIndexSearcherWrapper =
            new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, scriptService);
        final Directory directory = newDirectory();
        IndexWriter iw = new IndexWriter(
            directory,
            new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE)
        );

        Document document = new Document();
        document.add(new StringField("field1", "value1", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        document = new Document();
        document.add(new StringField("field1", "value2", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        document = new Document();
        document.add(new StringField("field1", "value3", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        document = new Document();
        document.add(new StringField("field1", "value4", Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);

        iw.commit();
        iw.deleteDocuments(new Term("field1", "value3"));
        iw.close();
        DirectoryReader directoryReader = DirectoryReader.open(directory);
        IndexSearcher searcher = new IndexSearcher(directoryReader);
        Weight weight = searcher.createWeight(new TermQuery(new Term("field2", "value1")),
            org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f);

        LeafReaderContext leaf = directoryReader.leaves().get(0);

        SparseFixedBitSet sparseFixedBitSet = query(leaf, "field1", "value1");
        LeafCollector leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assertThat(doc, equalTo(0));
            }
        };
        intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs());

        sparseFixedBitSet = query(leaf, "field1", "value2");
        leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assertThat(doc, equalTo(1));
            }
        };
        intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs());

        sparseFixedBitSet = query(leaf, "field1", "value3");
        leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                fail("docId [" + doc + "] should have been deleted");
            }
        };
        intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs());

        sparseFixedBitSet = query(leaf, "field1", "value4");
        leafCollector = new LeafBucketCollector() {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assertThat(doc, equalTo(3));
            }
        };
        intersectScorerAndRoleBits(weight.scorer(leaf), sparseFixedBitSet, leafCollector, leaf.reader().getLiveDocs());

        directoryReader.close();
        directory.close();
    }
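    // Editor's note, illustrative only: the production intersectScorerAndRoleBits
    // exercised above can be sketched with Lucene's public iterator APIs. A
    // ConjunctionDISI leapfrogs between the query's scorer and a BitSetIterator
    // over the role bits, and live docs are checked last, which is why the
    // deleted "value3" document is never collected. This sketch assumes extra
    // imports (DocIdSetIterator, ConjunctionDISI, BitSetIterator, Bits) and is
    // a hedged reconstruction, not the exact production method.
    private static void intersectSketch(Scorer scorer, SparseFixedBitSet roleBits,
                                        LeafCollector collector, Bits liveDocs) throws IOException {
        DocIdSetIterator roleBitsIterator = new BitSetIterator(roleBits, roleBits.approximateCardinality());
        DocIdSetIterator conjunction = ConjunctionDISI.intersectIterators(
            Arrays.asList(scorer.iterator(), roleBitsIterator));
        for (int doc = conjunction.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = conjunction.nextDoc()) {
            if (liveDocs == null || liveDocs.get(doc)) {
                collector.collect(doc);
            }
        }
    }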
    private void assertResolved(FieldPermissions permissions, Set<String> expected, String... fieldsToTest) {
        for (String field : expected) {
            assertThat(field, permissions.grantsAccessTo(field), is(true));
        }
        for (String field : fieldsToTest) {
            assertThat(field, permissions.grantsAccessTo(field), is(expected.contains(field)));
        }
    }
    public void testFieldPermissionsWithFieldExceptions() throws Exception {
        securityIndexSearcherWrapper =
            new SecurityIndexSearcherWrapper(null, null, threadContext, licenseState, null);
        String[] grantedFields = new String[]{};
        String[] deniedFields;
        Set<String> expected = new HashSet<>(META_FIELDS);
        // Presence of fields in a role with an empty array implies access to no fields except the meta fields
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})),
            expected, "foo", "bar");

        // make sure meta fields cannot be denied access
        deniedFields = META_FIELDS.toArray(new String[0]);
        assertResolved(new FieldPermissions(fieldPermissionDef(null, deniedFields)),
            new HashSet<>(Arrays.asList("foo", "bar", "_some_plugin_meta_field")));

        // check we can add all fields with *
        grantedFields = new String[]{"*"};
        expected = new HashSet<>(META_FIELDS);
        expected.add("foo");
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected);

        // same with null
        grantedFields = null;
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, randomBoolean() ? null : new String[]{})), expected);

        // check we remove only excluded fields
        grantedFields = new String[]{"*"};
        deniedFields = new String[]{"xfield"};
        expected = new HashSet<>(META_FIELDS);
        expected.add("foo");
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield");

        // same with null
        grantedFields = null;
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "xfield");

        // some other checks
        grantedFields = new String[]{"field*"};
        deniedFields = new String[]{"field1", "field2"};
        expected = new HashSet<>(META_FIELDS);
        expected.add("field3");
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2");

        grantedFields = new String[]{"field1", "field2"};
        deniedFields = new String[]{"field2"};
        expected = new HashSet<>(META_FIELDS);
        expected.add("field1");
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field1", "field2");

        grantedFields = new String[]{"field*"};
        deniedFields = new String[]{"field2"};
        expected = new HashSet<>(META_FIELDS);
        expected.add("field1");
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected, "field2");

        deniedFields = new String[]{"field*"};
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)),
            META_FIELDS, "field1", "field2");

        // empty array for allowed fields always means no field is allowed
        grantedFields = new String[]{};
        deniedFields = new String[]{};
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)),
            META_FIELDS, "field1", "field2");

        // make sure all fields can be explicitly allowed
        grantedFields = new String[]{"*"};
        deniedFields = randomBoolean() ? null : new String[]{};
        expected = new HashSet<>(META_FIELDS);
        expected.add("field1");
        assertResolved(new FieldPermissions(fieldPermissionDef(grantedFields, deniedFields)), expected);
    }
    private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException {
        SparseFixedBitSet sparseFixedBitSet = new SparseFixedBitSet(leaf.reader().maxDoc());
        TermsEnum tenum = leaf.reader().terms(field).iterator();
        // advance the terms enum until it is positioned on the requested value
        while (tenum.next().utf8ToString().equals(value) == false) {
        }
        PostingsEnum penum = tenum.postings(null);
        sparseFixedBitSet.or(penum);
        return sparseFixedBitSet;
    }
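    // Editor's note, illustrative only: the scan loop in query() above walks the
    // TermsEnum term by term. TermsEnum#seekExact positions the enum directly on
    // the requested term and is the more idiomatic form; this variant assumes an
    // extra import of org.apache.lucene.util.BytesRef.
    private SparseFixedBitSet querySeekExact(LeafReaderContext leaf, String field, String value) throws IOException {
        SparseFixedBitSet bits = new SparseFixedBitSet(leaf.reader().maxDoc());
        TermsEnum tenum = leaf.reader().terms(field).iterator();
        if (tenum.seekExact(new BytesRef(value))) {
            bits.or(tenum.postings(null)); // set a bit for every document containing field:value
        }
        return bits;
    }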
    public void testIndexSearcherWrapperSparseNoDeletions() throws IOException {
        doTestIndexSearcherWrapper(true, false);
    }

    public void testIndexSearcherWrapperDenseNoDeletions() throws IOException {
        doTestIndexSearcherWrapper(false, false);
    }

    public void testIndexSearcherWrapperSparseWithDeletions() throws IOException {
        doTestIndexSearcherWrapper(true, true);
    }

    public void testIndexSearcherWrapperDenseWithDeletions() throws IOException {
        doTestIndexSearcherWrapper(false, true);
    }
    static class CreateScorerOnceWeight extends Weight {

        private final Weight weight;
        private final Set<Object> seenLeaves = Collections.newSetFromMap(new IdentityHashMap<>());

        protected CreateScorerOnceWeight(Weight weight) {
            super(weight.getQuery());
            this.weight = weight;
        }

        @Override
        public void extractTerms(Set<Term> terms) {
            weight.extractTerms(terms);
        }

        @Override
        public Explanation explain(LeafReaderContext context, int doc) throws IOException {
            return weight.explain(context, doc);
        }

        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
            assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey()));
            return weight.scorer(context);
        }

        @Override
        public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
            assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey()));
            return weight.bulkScorer(context);
        }

        @Override
        public boolean isCacheable(LeafReaderContext ctx) {
            return true;
        }
    }
    static class CreateScorerOnceQuery extends Query {

        private final Query query;

        CreateScorerOnceQuery(Query query) {
            this.query = query;
        }

        @Override
        public String toString(String field) {
            return query.toString(field);
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            Query queryRewritten = query.rewrite(reader);
            if (query != queryRewritten) {
                return new CreateScorerOnceQuery(queryRewritten);
            }
            return super.rewrite(reader);
        }

        @Override
        public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException {
            return new CreateScorerOnceWeight(query.createWeight(searcher, scoreMode, boost));
        }

        @Override
        public boolean equals(Object obj) {
            return sameClassAs(obj) && query.equals(((CreateScorerOnceQuery) obj).query);
        }

        @Override
        public int hashCode() {
            return 31 * classHash() + query.hashCode();
        }
    }
    public void doTestIndexSearcherWrapper(boolean sparse, boolean deletions) throws IOException {
        Directory dir = newDirectory();
        IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(null));
        Document doc = new Document();
        StringField allowedField = new StringField("allowed", "yes", Store.NO);
        doc.add(allowedField);
        StringField fooField = new StringField("foo", "bar", Store.NO);
        doc.add(fooField);
        StringField deleteField = new StringField("delete", "no", Store.NO);
        doc.add(deleteField);
        w.addDocument(doc);
        if (deletions) {
            // add a document that matches foo:bar but will be deleted
            deleteField.setStringValue("yes");
            w.addDocument(doc);
            deleteField.setStringValue("no");
        }
        allowedField.setStringValue("no");
        w.addDocument(doc);
        if (sparse) {
            for (int i = 0; i < 1000; ++i) {
                w.addDocument(doc);
            }
            w.forceMerge(1);
        }
        w.deleteDocuments(new Term("delete", "yes"));

        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
        BitsetFilterCache.Listener listener = new BitsetFilterCache.Listener() {
            @Override
            public void onCache(ShardId shardId, Accountable accountable) {
            }

            @Override
            public void onRemoval(ShardId shardId, Accountable accountable) {
            }
        };
        DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(w), new ShardId(indexSettings.getIndex(), 0));
        BitsetFilterCache cache = new BitsetFilterCache(settings, listener);
        Query roleQuery = new TermQuery(new Term("allowed", "yes"));
        BitSet bitSet = cache.getBitSetProducer(roleQuery).getBitSet(reader.leaves().get(0));
        if (sparse) {
            assertThat(bitSet, instanceOf(SparseFixedBitSet.class));
        } else {
            assertThat(bitSet, instanceOf(FixedBitSet.class));
        }

        DocumentSubsetDirectoryReader filteredReader = DocumentSubsetReader.wrap(reader, cache, roleQuery);
        IndexSearcher wrapSearcher = new SecurityIndexSearcherWrapper.IndexSearcherWrapper(filteredReader);
        Engine.Searcher engineSearcher = new Engine.Searcher("test", wrapSearcher, () -> {});
        ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher,
            wrapSearcher.getQueryCache(), wrapSearcher.getQueryCachingPolicy());
        searcher.setCheckCancelled(() -> {});

        // Searching a non-existing term will trigger a null scorer
        assertEquals(0, searcher.count(new TermQuery(new Term("non_existing_field", "non_existing_value"))));

        assertEquals(1, searcher.count(new TermQuery(new Term("foo", "bar"))));

        // make sure scorers are created only once, see #1725
        assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery())));
        IOUtils.close(reader, w, dir);
    }

    private static FieldPermissionsDefinition fieldPermissionDef(String[] granted, String[] denied) {
        return new FieldPermissionsDefinition(granted, denied);
    }
}
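Before the Security.java wiring below, it is worth restating where the deleted optimization went. Per the commit message, ContextIndexSearcher now inspects the live docs of each leaf: if they are backed by a sparse bit set, it drives iteration by intersecting the query with the live docs; otherwise it lets the bulk scorer test liveness per matching document. The following is a rough sketch of that dispatch under those assumptions; names and structure are illustrative, not the actual implementation.

import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.SparseFixedBitSet;

// Illustrative sketch of the per-leaf dispatch described in the commit message.
final class LiveDocsDispatchSketch {
    static void searchLeaf(LeafReaderContext ctx, Weight weight, LeafCollector collector) throws IOException {
        Bits liveDocs = ctx.reader().getLiveDocs();
        if (liveDocs instanceof SparseFixedBitSet) {
            // sparse live docs: drive iteration through the (few) live documents
            Scorer scorer = weight.scorer(ctx);
            if (scorer == null) {
                return;
            }
            SparseFixedBitSet bits = (SparseFixedBitSet) liveDocs;
            DocIdSetIterator live = new BitSetIterator(bits, bits.approximateCardinality());
            DocIdSetIterator it = ConjunctionDISI.intersectIterators(Arrays.asList(scorer.iterator(), live));
            collector.setScorer(scorer);
            for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                collector.collect(doc);
            }
        } else {
            // dense (or no) deletions: let the bulk scorer test liveness per match
            BulkScorer bulkScorer = weight.bulkScorer(ctx);
            if (bulkScorer != null) {
                bulkScorer.score(collector, liveDocs);
            }
        }
    }
}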
@@ -119,7 +119,7 @@ import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken
 import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine;
 import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField;
 import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl;
-import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapper;
+import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexReaderWrapper;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
@@ -692,8 +692,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
         if (enabled) {
             assert getLicenseState() != null;
             if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
-                module.setSearcherWrapper(indexService ->
-                    new SecurityIndexSearcherWrapper(
+                module.setReaderWrapper(indexService ->
+                    new SecurityIndexReaderWrapper(
                         shardId -> indexService.newQueryShardContext(shardId.id(),
                         // we pass a null index reader, which is legal and will disable rewrite optimizations
                         // based on index statistics, which is probably safer...
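After this change, the only extension point is the reader: a plugin registers a wrapper for the DirectoryReader through IndexModule and can no longer substitute the IndexSearcher. A minimal sketch of what registration looks like, inferred from the diff above; the exact signature of setReaderWrapper and the plugin class are assumptions, not verified against the final API.

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.plugins.Plugin;

public class MyReaderWrappingPlugin extends Plugin {
    @Override
    public void onIndexModule(IndexModule module) {
        // mirror of the Security registration above: hand IndexModule a function
        // from IndexService to a DirectoryReader -> DirectoryReader wrapper
        module.setReaderWrapper(indexService -> this::wrapReader);
    }

    private DirectoryReader wrapReader(DirectoryReader reader) throws IOException {
        // hypothetical: return a FilterDirectoryReader subclass that hides docs or fields;
        // returning the reader unchanged is a legal no-op
        return reader;
    }
}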