Fix deprecations introduced by the upgrade to Lucene 5.3
This changes construction of phrase and boolean queries to use PhraseQuery.Builder and BooleanQuery.Builder, and replaces BitDocIdSetFilter with BitSetProducer for nested and parent/child queries. I had to remove the single-parent special case from ParentIdsFilter, since it used the source of parent BitSets as a regular Filter, which is no longer possible. I don't think this is an issue: that case rarely occurs, and the alternative logic used when several parent ids match should not be much worse.
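The two API migrations this commit applies are mechanical, so here is a rough illustration of the before/after shapes against Lucene 5.3. This sketch is not part of the commit, and the field/term names in it are made up:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;

class Lucene53MigrationSketch {

    // Deprecated in 5.3: mutable BooleanQuery, configured after construction.
    static Query booleanOld() {
        BooleanQuery bq = new BooleanQuery(true); // true == disable coord
        bq.add(new TermQuery(new Term("field", "value")), Occur.SHOULD);
        return bq;
    }

    // 5.3 replacement: configure a Builder, then build() an immutable query.
    static Query booleanNew() {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.setDisableCoord(true);
        builder.add(new TermQuery(new Term("field", "value")), Occur.SHOULD);
        return builder.build();
    }

    // Same pattern for PhraseQuery; explicit positions are still supported.
    static Query phraseNew() {
        PhraseQuery.Builder builder = new PhraseQuery.Builder();
        builder.setSlop(1);
        builder.add(new Term("field", "quick"), 0);
        builder.add(new Term("field", "fox"), 2);
        return builder.build();
    }

    // BitSetProducer supersedes BitDocIdSetFilter: it hands out a per-segment
    // BitSet of (e.g. parent) documents, but it is not a Filter and cannot be
    // added as a clause of another query -- which is why the single-parent
    // short-circuit in ParentIdsFilter had to go.
    static BitSet parentBits(BitSetProducer producer, LeafReaderContext context) throws IOException {
        return producer.getBitSet(context); // may be null when nothing matches
    }
}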
parent c5b39ce85e
commit 4f5591be8d
@@ -27,6 +27,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.Query;
@@ -299,7 +300,8 @@ public abstract class BlendedTermQuery extends Query {
         return new BlendedTermQuery(terms, boosts) {
             @Override
             protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
-                BooleanQuery query = new BooleanQuery(disableCoord);
+                BooleanQuery.Builder query = new BooleanQuery.Builder();
+                query.setDisableCoord(disableCoord);
                 for (int i = 0; i < terms.length; i++) {
                     TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
                     if (boosts != null) {
@@ -307,7 +309,7 @@ public abstract class BlendedTermQuery extends Query {
                     }
                     query.add(termQuery, BooleanClause.Occur.SHOULD);
                 }
-                return query;
+                return query.build();
             }
         };
     }
@@ -316,9 +318,10 @@ public abstract class BlendedTermQuery extends Query {
         return new BlendedTermQuery(terms, boosts) {
             @Override
             protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
-                BooleanQuery query = new BooleanQuery(true);
-                BooleanQuery high = new BooleanQuery(disableCoord);
-                BooleanQuery low = new BooleanQuery(disableCoord);
+                BooleanQuery.Builder highBuilder = new BooleanQuery.Builder();
+                highBuilder.setDisableCoord(disableCoord);
+                BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder();
+                lowBuilder.setDisableCoord(disableCoord);
                 for (int i = 0; i < terms.length; i++) {
                     TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
                     if (boosts != null) {
@@ -327,22 +330,28 @@ public abstract class BlendedTermQuery extends Query {
                     if ((maxTermFrequency >= 1f && docFreqs[i] > maxTermFrequency)
                             || (docFreqs[i] > (int) Math.ceil(maxTermFrequency
                                     * (float) maxDoc))) {
-                        high.add(termQuery, BooleanClause.Occur.SHOULD);
+                        highBuilder.add(termQuery, BooleanClause.Occur.SHOULD);
                     } else {
-                        low.add(termQuery, BooleanClause.Occur.SHOULD);
+                        lowBuilder.add(termQuery, BooleanClause.Occur.SHOULD);
                     }
                 }
+                BooleanQuery high = highBuilder.build();
+                BooleanQuery low = lowBuilder.build();
                 if (low.clauses().isEmpty()) {
+                    BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
+                    queryBuilder.setDisableCoord(disableCoord);
                     for (BooleanClause booleanClause : high) {
-                        booleanClause.setOccur(BooleanClause.Occur.MUST);
+                        queryBuilder.add(booleanClause.getQuery(), Occur.MUST);
                     }
-                    return high;
+                    return queryBuilder.build();
                 } else if (high.clauses().isEmpty()) {
                     return low;
                 } else {
-                    query.add(high, BooleanClause.Occur.SHOULD);
-                    query.add(low, BooleanClause.Occur.MUST);
-                    return query;
+                    return new BooleanQuery.Builder()
+                            .setDisableCoord(true)
+                            .add(high, BooleanClause.Occur.SHOULD)
+                            .add(low, BooleanClause.Occur.MUST)
+                            .build();
                 }
             }
         };
@@ -104,12 +104,13 @@ public class CustomFieldQuery extends FieldQuery {
          * It seems expensive but most queries will pretty small.
          */
         if (currentPos == terms.size()) {
-            PhraseQuery query = new PhraseQuery();
-            query.setBoost(orig.getBoost());
-            query.setSlop(orig.getSlop());
+            PhraseQuery.Builder queryBuilder = new PhraseQuery.Builder();
+            queryBuilder.setSlop(orig.getSlop());
             for (int i = 0; i < termsIdx.length; i++) {
-                query.add(terms.get(i)[termsIdx[i]], pos[i]);
+                queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]);
             }
+            PhraseQuery query = queryBuilder.build();
+            query.setBoost(orig.getBoost());
             this.flatten(query, reader, flatQueries);
         } else {
             Term[] t = terms.get(currentPos);
@@ -163,11 +163,11 @@ public class MoreLikeThisQuery extends Query {
     }
 
     private Query createQuery(XMoreLikeThis mlt) throws IOException {
-        BooleanQuery bq = new BooleanQuery();
+        BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
         if (this.likeFields != null) {
             Query mltQuery = mlt.like(this.likeFields);
             mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch);
-            bq.add(mltQuery, BooleanClause.Occur.SHOULD);
+            bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD);
         }
         if (this.likeText != null) {
             Reader[] readers = new Reader[likeText.length];
@@ -177,9 +177,10 @@ public class MoreLikeThisQuery extends Query {
             //LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field)
             Query mltQuery = mlt.like(moreLikeFields[0], readers);
             mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch);
-            bq.add(mltQuery, BooleanClause.Occur.SHOULD);
+            bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD);
         }
 
+        BooleanQuery bq = bqBuilder.build();
         bq.setBoost(getBoost());
         return bq;
     }
@@ -40,7 +40,7 @@ public class Queries {
 
     /** Return a query that matches no document. */
     public static Query newMatchNoDocsQuery() {
-        return new BooleanQuery();
+        return new BooleanQuery.Builder().build();
     }
 
     public static Filter newNestedFilter() {
@@ -64,10 +64,10 @@ public class Queries {
 
     /** Return a query that matches all documents but those that match the given query. */
     public static Query not(Query q) {
-        BooleanQuery bq = new BooleanQuery();
-        bq.add(new MatchAllDocsQuery(), Occur.MUST);
-        bq.add(q, Occur.MUST_NOT);
-        return bq;
+        return new BooleanQuery.Builder()
+            .add(new MatchAllDocsQuery(), Occur.MUST)
+            .add(q, Occur.MUST_NOT)
+            .build();
     }
 
     public static boolean isNegativeQuery(Query q) {
@@ -86,9 +86,14 @@ public class Queries {
 
     public static Query fixNegativeQueryIfNeeded(Query q) {
         if (isNegativeQuery(q)) {
-            BooleanQuery newBq = (BooleanQuery) q.clone();
-            newBq.add(newMatchAllQuery(), BooleanClause.Occur.MUST);
-            return newBq;
+            BooleanQuery bq = (BooleanQuery) q;
+            BooleanQuery.Builder builder = new BooleanQuery.Builder();
+            builder.setDisableCoord(bq.isCoordDisabled());
+            for (BooleanClause clause : bq) {
+                builder.add(clause);
+            }
+            builder.add(newMatchAllQuery(), BooleanClause.Occur.MUST);
+            return builder.build();
         }
         return q;
     }
@@ -73,7 +73,7 @@ public class IndexAliasesService extends AbstractIndexComponent {
             return parse(alias);
         } else {
             // we need to bench here a bit, to see maybe it makes sense to use OrFilter
-            BooleanQuery combined = new BooleanQuery();
+            BooleanQuery.Builder combined = new BooleanQuery.Builder();
             for (String aliasName : aliasNames) {
                 AliasMetaData alias = this.aliases.get(aliasName);
                 if (alias == null) {
@@ -88,7 +88,7 @@ public class IndexAliasesService extends AbstractIndexComponent {
                     return null;
                 }
             }
-            return combined;
+            return combined.build();
         }
     }
 
@@ -24,14 +24,17 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;
 
+import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.join.BitSetProducer;
-import org.apache.lucene.util.BitDocIdSet;
-import org.apache.lucene.util.SparseFixedBitSet;
+import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.inject.Inject;
@@ -56,6 +59,7 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
@@ -69,13 +73,13 @@ import java.util.concurrent.Executor;
  * and require that it should always be around should use this cache, otherwise the
 * {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead.
 */
-public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Filter, BitsetFilterCache.Value>>, Closeable {
+public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {
 
     public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly";
 
     private final boolean loadRandomAccessFiltersEagerly;
-    private final Cache<Object, Cache<Filter, Value>> loadedFilters;
-    private final BitDocIdSetFilterWarmer warmer;
+    private final Cache<Object, Cache<Query, Value>> loadedFilters;
+    private final BitSetProducerWarmer warmer;
 
     private IndexService indexService;
     private IndicesWarmer indicesWarmer;
@@ -85,7 +89,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         super(index, indexSettings);
         this.loadRandomAccessFiltersEagerly = indexSettings.getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true);
         this.loadedFilters = CacheBuilder.newBuilder().removalListener(this).build();
-        this.warmer = new BitDocIdSetFilterWarmer();
+        this.warmer = new BitSetProducerWarmer();
     }
 
     @Inject(optional = true)
@@ -101,9 +105,8 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         indicesWarmer.addListener(warmer);
     }
 
-    public BitDocIdSetFilter getBitDocIdSetFilter(Filter filter) {
-        assert filter != null;
-        return new BitDocIdSetFilterWrapper(filter);
+    public BitSetProducer getBitSetProducer(Query query) {
+        return new QueryWrapperBitSetProducer(query);
     }
 
     @Override
@@ -122,38 +125,29 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         loadedFilters.invalidateAll();
     }
 
-    private BitDocIdSet getAndLoadIfNotPresent(final Filter filter, final LeafReaderContext context) throws IOException, ExecutionException {
+    private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException {
         final Object coreCacheReader = context.reader().getCoreCacheKey();
         final ShardId shardId = ShardUtils.extractShardId(context.reader());
-        Cache<Filter, Value> filterToFbs = loadedFilters.get(coreCacheReader, new Callable<Cache<Filter, Value>>() {
+        Cache<Query, Value> filterToFbs = loadedFilters.get(coreCacheReader, new Callable<Cache<Query, Value>>() {
             @Override
-            public Cache<Filter, Value> call() throws Exception {
+            public Cache<Query, Value> call() throws Exception {
                 context.reader().addCoreClosedListener(BitsetFilterCache.this);
                 return CacheBuilder.newBuilder().build();
             }
         });
-        return filterToFbs.get(filter, new Callable<Value>() {
+        return filterToFbs.get(query, new Callable<Value>() {
             @Override
             public Value call() throws Exception {
-                DocIdSet docIdSet = filter.getDocIdSet(context, null);
-                final BitDocIdSet bitSet;
-                if (docIdSet instanceof BitDocIdSet) {
-                    bitSet = (BitDocIdSet) docIdSet;
+                final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
+                final IndexSearcher searcher = new IndexSearcher(topLevelContext);
+                searcher.setQueryCache(null);
+                final Weight weight = searcher.createNormalizedWeight(query, false);
+                final DocIdSetIterator it = weight.scorer(context);
+                final BitSet bitSet;
+                if (it == null) {
+                    bitSet = null;
                 } else {
-                    BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc());
-                    if (docIdSet != null && docIdSet != DocIdSet.EMPTY) {
-                        DocIdSetIterator iterator = docIdSet.iterator();
-                        // some filters (QueryWrapperFilter) return not null or DocIdSet.EMPTY if there no matching docs
-                        if (iterator != null) {
-                            builder.or(iterator);
-                        }
-                    }
-                    BitDocIdSet bits = builder.build();
-                    // code expects this to be non-null
-                    if (bits == null) {
-                        bits = new BitDocIdSet(new SparseFixedBitSet(context.reader().maxDoc()), 0);
-                    }
-                    bitSet = bits;
+                    bitSet = BitSet.of(it, context.reader().maxDoc());
                 }
 
                 Value value = new Value(bitSet, shardId);
@@ -169,18 +163,18 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
     }
 
     @Override
-    public void onRemoval(RemovalNotification<Object, Cache<Filter, Value>> notification) {
+    public void onRemoval(RemovalNotification<Object, Cache<Query, Value>> notification) {
         Object key = notification.getKey();
         if (key == null) {
             return;
         }
 
-        Cache<Filter, Value> value = notification.getValue();
+        Cache<Query, Value> value = notification.getValue();
         if (value == null) {
             return;
         }
 
-        for (Map.Entry<Filter, Value> entry : value.asMap().entrySet()) {
+        for (Map.Entry<Query, Value> entry : value.asMap().entrySet()) {
             if (entry.getValue().shardId == null) {
                 continue;
             }
@@ -195,50 +189,50 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
 
     public static final class Value {
 
-        final BitDocIdSet bitset;
+        final BitSet bitset;
         final ShardId shardId;
 
-        public Value(BitDocIdSet bitset, ShardId shardId) {
+        public Value(BitSet bitset, ShardId shardId) {
             this.bitset = bitset;
             this.shardId = shardId;
         }
     }
 
-    final class BitDocIdSetFilterWrapper extends BitDocIdSetFilter {
+    final class QueryWrapperBitSetProducer implements BitSetProducer {
 
-        final Filter filter;
+        final Query query;
 
-        BitDocIdSetFilterWrapper(Filter filter) {
-            this.filter = filter;
+        QueryWrapperBitSetProducer(Query query) {
+            this.query = Objects.requireNonNull(query);
         }
 
         @Override
-        public BitDocIdSet getDocIdSet(LeafReaderContext context) throws IOException {
+        public BitSet getBitSet(LeafReaderContext context) throws IOException {
             try {
-                return getAndLoadIfNotPresent(filter, context);
+                return getAndLoadIfNotPresent(query, context);
             } catch (ExecutionException e) {
                 throw ExceptionsHelper.convertToElastic(e);
             }
         }
 
         @Override
-        public String toString(String field) {
-            return "random_access(" + filter + ")";
+        public String toString() {
+            return "random_access(" + query + ")";
        }
 
         @Override
         public boolean equals(Object o) {
-            if (!(o instanceof BitDocIdSetFilterWrapper)) return false;
-            return this.filter.equals(((BitDocIdSetFilterWrapper) o).filter);
+            if (!(o instanceof QueryWrapperBitSetProducer)) return false;
+            return this.query.equals(((QueryWrapperBitSetProducer) o).query);
         }
 
         @Override
         public int hashCode() {
-            return filter.hashCode() ^ 0x1117BF26;
+            return 31 * getClass().hashCode() + query.hashCode();
         }
     }
 
-    final class BitDocIdSetFilterWarmer extends IndicesWarmer.Listener {
+    final class BitSetProducerWarmer extends IndicesWarmer.Listener {
 
         @Override
         public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) {
@@ -247,7 +241,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
             }
 
             boolean hasNested = false;
-            final Set<Filter> warmUp = new HashSet<>();
+            final Set<Query> warmUp = new HashSet<>();
             final MapperService mapperService = indexShard.mapperService();
             for (DocumentMapper docMapper : mapperService.docMappers(false)) {
                 if (docMapper.hasNestedObjects()) {
@@ -270,7 +264,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
             final Executor executor = threadPool.executor(executor());
             final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
             for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
-                for (final Filter filterToWarm : warmUp) {
+                for (final Query filterToWarm : warmUp) {
                    executor.execute(new Runnable() {
 
                        @Override
@@ -306,7 +300,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
 
    }
 
-    Cache<Object, Cache<Filter, Value>> getLoadedFilters() {
+    Cache<Object, Cache<Query, Value>> getLoadedFilters() {
        return loadedFilters;
    }
}
@@ -20,11 +20,22 @@
 package org.elasticsearch.index.engine;
 
 import com.google.common.base.Preconditions;
-import org.apache.lucene.index.*;
 
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SearcherManager;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Accountables;
 import org.elasticsearch.ExceptionsHelper;
@@ -55,7 +66,11 @@ import org.elasticsearch.index.translog.Translog;
 
 import java.io.Closeable;
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Condition;
@@ -911,13 +926,13 @@ public abstract class Engine implements Closeable {
         private final String[] filteringAliases;
         private final Query aliasFilter;
         private final String[] types;
-        private final BitDocIdSetFilter parentFilter;
+        private final BitSetProducer parentFilter;
         private final Operation.Origin origin;
 
         private final long startTime;
         private long endTime;
 
-        public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitDocIdSetFilter parentFilter, Operation.Origin origin, long startTime, String... types) {
+        public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitSetProducer parentFilter, Operation.Origin origin, long startTime, String... types) {
             this.query = query;
             this.source = source;
             this.types = types;
@@ -952,7 +967,7 @@ public abstract class Engine implements Closeable {
             return parentFilter != null;
         }
 
-        public BitDocIdSetFilter parentFilter() {
+        public BitSetProducer parentFilter() {
             return parentFilter;
         }
 
@@ -638,10 +638,10 @@ public class InternalEngine extends Engine {
         try {
             Query query = delete.query();
             if (delete.aliasFilter() != null) {
-                BooleanQuery boolQuery = new BooleanQuery();
-                boolQuery.add(query, Occur.MUST);
-                boolQuery.add(delete.aliasFilter(), Occur.FILTER);
-                query = boolQuery;
+                query = new BooleanQuery.Builder()
+                    .add(query, Occur.MUST)
+                    .add(delete.aliasFilter(), Occur.FILTER)
+                    .build();
             }
             if (delete.nested()) {
                 query = new IncludeNestedDocsQuery(query, delete.parentFilter());
@@ -19,11 +19,15 @@
 
 package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.FieldComparatorSource;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Settings;
@@ -119,10 +123,10 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
     */
     public static class Nested {
 
-        private final BitDocIdSetFilter rootFilter;
+        private final BitSetProducer rootFilter;
         private final Filter innerFilter;
 
-        public Nested(BitDocIdSetFilter rootFilter, Filter innerFilter) {
+        public Nested(BitSetProducer rootFilter, Filter innerFilter) {
             this.rootFilter = rootFilter;
             this.innerFilter = innerFilter;
         }
@@ -130,8 +134,8 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
         /**
         * Get a {@link BitDocIdSet} that matches the root documents.
         */
-        public BitDocIdSet rootDocs(LeafReaderContext ctx) throws IOException {
-            return rootFilter.getDocIdSet(ctx);
+        public BitSet rootDocs(LeafReaderContext ctx) throws IOException {
+            return rootFilter.getBitSet(ctx);
         }
 
         /**
@@ -94,7 +94,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
         if (nested == null) {
             selectedValues = sortMode.select(values);
         } else {
-            final BitSet rootDocs = nested.rootDocs(context).bits();
+            final BitSet rootDocs = nested.rootDocs(context);
             final DocIdSet innerDocs = nested.innerDocs(context);
             selectedValues = sortMode.select(values, rootDocs, innerDocs);
         }
@@ -124,7 +124,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
         if (nested == null) {
             selectedValues = sortMode.select(values, nonNullMissingBytes);
         } else {
-            final BitSet rootDocs = nested.rootDocs(context).bits();
+            final BitSet rootDocs = nested.rootDocs(context);
             final DocIdSet innerDocs = nested.innerDocs(context);
             selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc());
         }
@@ -78,7 +78,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
         if (nested == null) {
             selectedValues = sortMode.select(values, dMissingValue);
         } else {
-            final BitSet rootDocs = nested.rootDocs(context).bits();
+            final BitSet rootDocs = nested.rootDocs(context);
             final DocIdSet innerDocs = nested.innerDocs(context);
             selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc());
         }
@@ -70,7 +70,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator
         if (nested == null) {
             selectedValues = sortMode.select(values, dMissingValue);
         } else {
-            final BitSet rootDocs = nested.rootDocs(context).bits();
+            final BitSet rootDocs = nested.rootDocs(context);
             final DocIdSet innerDocs = nested.innerDocs(context);
             selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc());
         }
@@ -69,7 +69,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS
         if (nested == null) {
             selectedValues = sortMode.select(values, dMissingValue);
         } else {
-            final BitSet rootDocs = nested.rootDocs(context).bits();
+            final BitSet rootDocs = nested.rootDocs(context);
             final DocIdSet innerDocs = nested.innerDocs(context);
             selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc());
         }
@@ -426,10 +426,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
 
         if (types == null || types.length == 0) {
             if (hasNested && filterPercolateType) {
-                BooleanQuery bq = new BooleanQuery();
+                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                 bq.add(percolatorType, Occur.MUST_NOT);
                 bq.add(Queries.newNonNestedFilter(), Occur.MUST);
-                return new ConstantScoreQuery(bq);
+                return new ConstantScoreQuery(bq.build());
             } else if (hasNested) {
                 return Queries.newNonNestedFilter();
             } else if (filterPercolateType) {
@@ -444,10 +444,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             DocumentMapper docMapper = documentMapper(types[0]);
             Query filter = docMapper != null ? docMapper.typeFilter() : new TermQuery(new Term(TypeFieldMapper.NAME, types[0]));
             if (filterPercolateType) {
-                BooleanQuery bq = new BooleanQuery();
+                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                 bq.add(percolatorType, Occur.MUST_NOT);
                 bq.add(filter, Occur.MUST);
-                return new ConstantScoreQuery(bq);
+                return new ConstantScoreQuery(bq.build());
             } else {
                 return filter;
             }
@@ -474,16 +474,16 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             }
             TermsQuery termsFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes);
             if (filterPercolateType) {
-                BooleanQuery bq = new BooleanQuery();
+                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                 bq.add(percolatorType, Occur.MUST_NOT);
                 bq.add(termsFilter, Occur.MUST);
-                return new ConstantScoreQuery(bq);
+                return new ConstantScoreQuery(bq.build());
             } else {
                 return termsFilter;
             }
         } else {
             // Current bool filter requires that at least one should clause matches, even with a must clause.
-            BooleanQuery bool = new BooleanQuery();
+            BooleanQuery.Builder bool = new BooleanQuery.Builder();
             for (String type : types) {
                 DocumentMapper docMapper = documentMapper(type);
                 if (docMapper == null) {
@@ -499,7 +499,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                 bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
             }
 
-            return new ConstantScoreQuery(bool);
+            return new ConstantScoreQuery(bool.build());
         }
     }
 
@@ -189,7 +189,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             return super.prefixQuery(value, method, context);
         }
         Collection<String> queryTypes = context.queryTypes();
-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder query = new BooleanQuery.Builder();
         for (String queryType : queryTypes) {
             PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
             if (method != null) {
@@ -197,7 +197,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             }
             query.add(prefixQuery, BooleanClause.Occur.SHOULD);
         }
-        return query;
+        return query.build();
     }
 
     @Override
@@ -214,7 +214,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             }
             return regexpQuery;
         }
-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder query = new BooleanQuery.Builder();
         for (String queryType : queryTypes) {
             RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates);
             if (method != null) {
@@ -222,7 +222,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             }
             query.add(regexpQuery, BooleanClause.Occur.SHOULD);
         }
-        return query;
+        return query.build();
     }
 }
 
@@ -106,10 +106,11 @@ public class AndQueryParser implements QueryParser {
             return null;
         }
 
-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
         for (Query f : queries) {
-            query.add(f, Occur.MUST);
+            queryBuilder.add(f, Occur.MUST);
         }
+        BooleanQuery query = queryBuilder.build();
         if (queryName != null) {
             parseContext.addNamedQuery(queryName, query);
         }
@@ -161,10 +161,12 @@ public class BoolQueryParser implements QueryParser {
             return new MatchAllDocsQuery();
         }
 
-        BooleanQuery booleanQuery = new BooleanQuery(disableCoord);
+        BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
+        booleanQueryBuilder.setDisableCoord(disableCoord);
         for (BooleanClause clause : clauses) {
-            booleanQuery.add(clause);
+            booleanQueryBuilder.add(clause);
         }
+        BooleanQuery booleanQuery = booleanQueryBuilder.build();
         booleanQuery.setBoost(boost);
         booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
         Query query = adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery;
@@ -95,7 +95,7 @@ public class ExistsQueryParser implements QueryParser {
             return Queries.newMatchNoDocsQuery();
         }
 
-        BooleanQuery boolFilter = new BooleanQuery();
+        BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
         for (String field : fields) {
             MappedFieldType fieldType = parseContext.fieldMapper(field);
             Query filter = null;
@@ -115,9 +115,10 @@ public class ExistsQueryParser implements QueryParser {
             if (filter == null) {
                 filter = new TermRangeQuery(field, null, null, true, true);
             }
-            boolFilter.add(filter, BooleanClause.Occur.SHOULD);
+            boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
         }
 
+        BooleanQuery boolFilter = boolFilterBuilder.build();
         if (queryName != null) {
             parseContext.addNamedQuery(queryName, boolFilter);
         }
@@ -159,12 +159,12 @@ public class GeoShapeQueryParser implements QueryParser {
             if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) {
                 // this strategy doesn't support disjoint anymore: but it did before, including creating lucene fieldcache (!)
                 // in this case, execute disjoint as exists && !intersects
-                BooleanQuery bool = new BooleanQuery();
+                BooleanQuery.Builder bool = new BooleanQuery.Builder();
                 Query exists = ExistsQueryParser.newFilter(parseContext, fieldName, null);
                 Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS));
                 bool.add(exists, BooleanClause.Occur.MUST);
                 bool.add(intersects, BooleanClause.Occur.MUST_NOT);
-                query = new ConstantScoreQuery(bool);
+                query = new ConstantScoreQuery(bool.build());
             } else {
                 query = strategy.makeQuery(getArgs(shape, shapeRelation));
             }
@@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.Version;
@@ -167,7 +167,7 @@ public class HasChildQueryParser implements QueryParser {
             throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
         }
 
-        BitDocIdSetFilter nonNestedDocsFilter = null;
+        BitSetProducer nonNestedDocsFilter = null;
         if (parentDocMapper.hasNestedObjects()) {
             nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
         }
@@ -180,14 +180,14 @@ public class HasParentQueryParser implements QueryParser {
                 parentFilter = documentMapper.typeFilter();
             }
         } else {
-            BooleanQuery parentsFilter = new BooleanQuery();
+            BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
             for (String parentTypeStr : parentTypes) {
                 DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
                 if (documentMapper != null) {
                     parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
                 }
             }
-            parentFilter = parentsFilter;
+            parentFilter = parentsFilter.build();
         }
 
         if (parentFilter == null) {
@@ -118,7 +118,7 @@ public class MissingQueryParser implements QueryParser {
         Query nullFilter = null;
 
         if (existence) {
-            BooleanQuery boolFilter = new BooleanQuery();
+            BooleanQuery.Builder boolFilter = new BooleanQuery.Builder();
             for (String field : fields) {
                 MappedFieldType fieldType = parseContext.fieldMapper(field);
                 Query filter = null;
@@ -141,7 +141,7 @@ public class MissingQueryParser implements QueryParser {
                 boolFilter.add(filter, BooleanClause.Occur.SHOULD);
             }
 
-            existenceFilter = boolFilter;
+            existenceFilter = boolFilter.build();
             existenceFilter = Queries.not(existenceFilter);;
         }
 
@@ -157,11 +157,10 @@ public class MissingQueryParser implements QueryParser {
         Query filter;
         if (nullFilter != null) {
             if (existenceFilter != null) {
-                BooleanQuery combined = new BooleanQuery();
-                combined.add(existenceFilter, BooleanClause.Occur.SHOULD);
-                combined.add(nullFilter, BooleanClause.Occur.SHOULD);
-                // cache the not filter as well, so it will be faster
-                filter = combined;
+                filter = new BooleanQuery.Builder()
+                    .add(existenceFilter, BooleanClause.Occur.SHOULD)
+                    .add(nullFilter, BooleanClause.Occur.SHOULD)
+                    .build();
             } else {
                 filter = nullFilter;
             }
@@ -290,14 +290,14 @@ public class MoreLikeThisQueryParser implements QueryParser {
                 }
             }
 
-            BooleanQuery boolQuery = new BooleanQuery();
+            BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
             boolQuery.add(mltQuery, BooleanClause.Occur.SHOULD);
 
             // exclude the items from the search
             if (!include) {
                 handleExclude(boolQuery, likeItems);
             }
-            return boolQuery;
+            return boolQuery.build();
         }
 
         return mltQuery;
@@ -342,7 +342,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
         return moreLikeFields;
     }
 
-    private void handleExclude(BooleanQuery boolQuery, MultiTermVectorsRequest likeItems) {
+    private void handleExclude(BooleanQuery.Builder boolQuery, MultiTermVectorsRequest likeItems) {
         // artificial docs get assigned a random id and should be disregarded
         List<BytesRef> uids = new ArrayList<>();
         for (TermVectorsRequest item : likeItems) {
@@ -103,10 +103,11 @@ public class OrQueryParser implements QueryParser {
             return null;
         }
 
-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
         for (Query f : queries) {
-            query.add(f, Occur.SHOULD);
+            queryBuilder.add(f, Occur.SHOULD);
         }
+        BooleanQuery query = queryBuilder.build();
         if (queryName != null) {
             parseContext.addNamedQuery(queryName, query);
         }
@@ -27,7 +27,7 @@ import org.apache.lucene.queryparser.classic.MapperQueryParser;
 import org.apache.lucene.queryparser.classic.QueryParserSettings;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.similarities.Similarity;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -39,7 +39,11 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperBuilders;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.support.NestedScope;
@@ -50,7 +54,10 @@ import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
 
 public class QueryParseContext {
 
@@ -173,8 +180,8 @@ public class QueryParseContext {
         return queryParser;
     }
 
-    public BitDocIdSetFilter bitsetFilter(Filter filter) {
-        return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter);
+    public BitSetProducer bitsetFilter(Filter filter) {
+        return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter);
     }
 
     public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
@@ -56,7 +56,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
 
     @Override
     public Query newDefaultQuery(String text) {
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 Query q = createBooleanQuery(entry.getKey(), text, super.getDefaultOperator());
@@ -68,7 +69,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }
 
     /**
@@ -80,24 +81,24 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
         if (settings.lowercaseExpandedTerms()) {
             text = text.toLowerCase(settings.locale());
         }
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 Query q = new FuzzyQuery(new Term(entry.getKey(), text), fuzziness);
-                if (q != null) {
-                    q.setBoost(entry.getValue());
-                    bq.add(q, BooleanClause.Occur.SHOULD);
-                }
+                q.setBoost(entry.getValue());
+                bq.add(q, BooleanClause.Occur.SHOULD);
             } catch (RuntimeException e) {
                 rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }
 
     @Override
     public Query newPhraseQuery(String text, int slop) {
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 Query q = createPhraseQuery(entry.getKey(), text, slop);
@@ -109,7 +110,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }
 
     /**
@@ -121,7 +122,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
         if (settings.lowercaseExpandedTerms()) {
             text = text.toLowerCase(settings.locale());
         }
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 if (settings.analyzeWildcard()) {
@@ -137,7 +139,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 return rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }
 
     /**
@@ -182,7 +184,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
             }
             return new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef())));
         } else {
-            BooleanQuery bq = new BooleanQuery();
+            BooleanQuery.Builder bq = new BooleanQuery.Builder();
             for (int i = 0; i < numTokens; i++) {
                 try {
                     boolean hasNext = buffer.incrementToken();
@@ -192,7 +194,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 }
                 bq.add(new BooleanClause(new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef()))), BooleanClause.Occur.SHOULD));
             }
-            return bq;
+            return bq.build();
         }
     } catch (IOException e) {
         // Bail on any exceptions, going with a regular prefix query
@@ -193,7 +193,8 @@ public class TermsQueryParser implements QueryParser {
                 query = new TermsQuery(fieldName, filterValues);
             }
         } else {
-            BooleanQuery bq = new BooleanQuery(disableCoord);
+            BooleanQuery.Builder bq = new BooleanQuery.Builder();
+            bq.setDisableCoord(disableCoord);
             for (Object term : terms) {
                 if (fieldType != null) {
                     bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD);
@@ -201,7 +202,7 @@ public class TermsQueryParser implements QueryParser {
                     bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD);
                 }
             }
-            query = Queries.applyMinimumShouldMatch(bq, minShouldMatch);
+            query = Queries.applyMinimumShouldMatch(bq.build(), minShouldMatch);
         }
         query.setBoost(boost);
 
@@ -156,10 +156,10 @@ public class FunctionScoreQueryParser implements QueryParser {
         } else if (query == null && filter != null) {
             query = new ConstantScoreQuery(filter);
         } else if (query != null && filter != null) {
-            final BooleanQuery filtered = new BooleanQuery();
+            final BooleanQuery.Builder filtered = new BooleanQuery.Builder();
             filtered.add(query, Occur.MUST);
             filtered.add(filter, Occur.FILTER);
-            query = filtered;
+            query = filtered.build();
         }
         // if all filter elements returned null, just use the query
         if (filterFunctions.isEmpty() && combineFunction == null) {
@@ -21,7 +21,7 @@ package org.elasticsearch.index.query.support;
 
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -53,7 +53,7 @@ public class NestedInnerQueryParseSupport {
     protected boolean queryFound = false;
     protected boolean filterFound = false;
 
-    protected BitDocIdSetFilter parentFilter;
+    protected BitSetProducer parentFilter;
     protected Filter childFilter;
 
     protected ObjectMapper nestedObjectMapper;
@@ -132,11 +132,11 @@ public class MultiMatchQuery extends MatchQuery {
             }
             return disMaxQuery;
         } else {
-            final BooleanQuery booleanQuery = new BooleanQuery();
+            final BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
             for (Query query : groupQuery) {
                 booleanQuery.add(query, BooleanClause.Occur.SHOULD);
             }
-            return booleanQuery;
+            return booleanQuery.build();
         }
     }
 
@@ -35,8 +35,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.XFilteredDocIdSetIterator;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lucene.IndexCacheableQuery;
 import org.elasticsearch.common.lucene.Lucene;
@@ -61,9 +60,9 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
     private final String childType;
     private final Filter parentFilter;
     private final int shortCircuitParentDocSet;
-    private final BitDocIdSetFilter nonNestedDocsFilter;
+    private final BitSetProducer nonNestedDocsFilter;
 
-    public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
+    public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
         this.parentChildIndexFieldData = parentChildIndexFieldData;
         this.parentFilter = parentFilter;
         this.parentType = parentType;
@@ -92,7 +91,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
         final long valueCount;
         List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
         if (globalIfd == null || leaves.isEmpty()) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         } else {
             AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
             SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
@@ -100,7 +99,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
         }
 
         if (valueCount == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }
 
         ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
@@ -108,7 +107,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
 
         final long remaining = collector.foundParents();
         if (remaining == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }
 
         Filter shortCircuitFilter = null;
@@ -34,8 +34,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.XFilteredDocIdSetIterator;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
@@ -74,9 +73,9 @@ public final class ChildrenQuery extends IndexCacheableQuery {
     protected final int minChildren;
     protected final int maxChildren;
     protected final int shortCircuitParentDocSet;
-    protected final BitDocIdSetFilter nonNestedDocsFilter;
+    protected final BitSetProducer nonNestedDocsFilter;
 
-    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
+    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
         this.ifd = ifd;
         this.parentType = parentType;
         this.childType = childType;
@@ -150,7 +149,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
         IndexParentChildFieldData globalIfd = ifd.loadGlobal(searcher.getIndexReader());
         if (globalIfd == null) {
             // No docs of the specified type exist on this shard
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }
 
         boolean abort = true;
@@ -193,7 +192,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
             searcher.search(childQuery, collector);
             numFoundParents = collector.foundParents();
             if (numFoundParents == 0) {
-                return new BooleanQuery().createWeight(searcher, needsScores);
+                return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
             }
             abort = false;
         } finally {
@@ -81,7 +81,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
         final long maxOrd;
         List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
         if (globalIfd == null || leaves.isEmpty()) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         } else {
             AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
             SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
@@ -89,14 +89,14 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
         }
 
         if (maxOrd == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }
 
         ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
         searcher.search(parentQuery, collector);
 
         if (collector.parentCount() == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }
 
         return new ChildrenWeight(this, childrenFilter, collector, globalIfd);
@ -21,17 +21,12 @@ package org.elasticsearch.index.search.child;
|
|||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.PostingsEnum;
|
||||
import org.apache.lucene.index.SortedDocValues;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.apache.lucene.search.QueryWrapperFilter;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.search.join.BitDocIdSetFilter;
|
||||
import org.apache.lucene.search.join.BitSetProducer;
|
||||
import org.apache.lucene.util.BitDocIdSet;
|
||||
import org.apache.lucene.util.BitSet;
|
||||
import org.apache.lucene.util.Bits;
|
||||
|
@@ -57,75 +52,51 @@ import java.io.IOException;
*/
final class ParentIdsFilter extends Filter {

static Filter createShortCircuitFilter(BitDocIdSetFilter nonNestedDocsFilter, SearchContext searchContext,
static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext,
String parentType, SortedDocValues globalValues,
LongBitSet parentOrds, long numFoundParents) {
if (numFoundParents == 1) {
BytesRef id = globalValues.lookupOrd((int) parentOrds.nextSetBit(0));
if (nonNestedDocsFilter != null) {
BooleanQuery bq = new BooleanQuery();
bq.add(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))), Occur.MUST);
bq.add(nonNestedDocsFilter, Occur.MUST);
return new QueryWrapperFilter(bq);
} else {
return new QueryWrapperFilter(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))));
BytesRefHash parentIds= null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
parentIds.add(globalValues.lookupOrd((int) parentOrd));
}
} else {
BytesRefHash parentIds= null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
parentIds.add(globalValues.lookupOrd((int) parentOrd));
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}

static Filter createShortCircuitFilter(BitDocIdSetFilter nonNestedDocsFilter, SearchContext searchContext,
static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext,
String parentType, SortedDocValues globalValues,
LongHash parentIdxs, long numFoundParents) {
if (numFoundParents == 1) {
BytesRef id = globalValues.lookupOrd((int) parentIdxs.get(0));
if (nonNestedDocsFilter != null) {
BooleanQuery bq = new BooleanQuery();
bq.add(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))), Occur.MUST);
bq.add(nonNestedDocsFilter, Occur.MUST);
return new QueryWrapperFilter(bq);
} else {
return new QueryWrapperFilter(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))));
BytesRefHash parentIds = null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (int id = 0; id < parentIdxs.size(); id++) {
parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id)));
}
} else {
BytesRefHash parentIds = null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (int id = 0; id < parentIdxs.size(); id++) {
parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id)));
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}

private final BytesRef parentTypeBr;
private final BitDocIdSetFilter nonNestedDocsFilter;
private final BitSetProducer nonNestedDocsFilter;
private final BytesRefHash parentIds;

private ParentIdsFilter(String parentType, BitDocIdSetFilter nonNestedDocsFilter, BytesRefHash parentIds) {
private ParentIdsFilter(String parentType, BitSetProducer nonNestedDocsFilter, BytesRefHash parentIds) {
this.nonNestedDocsFilter = nonNestedDocsFilter;
this.parentTypeBr = new BytesRef(parentType);
this.parentIds = parentIds;

@@ -148,7 +119,7 @@ final class ParentIdsFilter extends Filter {

BitSet nonNestedDocs = null;
if (nonNestedDocsFilter != null) {
nonNestedDocs = nonNestedDocsFilter.getDocIdSet(context).bits();
nonNestedDocs = nonNestedDocsFilter.getBitSet(context);
}

PostingsEnum docsEnum = null;
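Note the consumer-side shape of the new API: where `BitDocIdSetFilter.getDocIdSet(context).bits()` returned a set to unwrap, `BitSetProducer.getBitSet(context)` hands back the per-segment `BitSet` directly, or null when the segment has no matching docs. A small sketch of the null-checking pattern, with `ParentBits.countParents` as a hypothetical helper:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;

final class ParentBits {
    // getBitSet may return null: callers must treat that as "no parents
    // in this segment" instead of unwrapping a DocIdSet as before.
    static int countParents(BitSetProducer parents, LeafReaderContext ctx) throws IOException {
        BitSet bits = parents.getBitSet(ctx);
        return bits == null ? 0 : bits.cardinality();
    }
}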
@@ -125,14 +125,14 @@ public class ParentQuery extends IndexCacheableQuery {
IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader());
if (globalIfd == null) {
// No docs of the specified type exist on this shard
return new BooleanQuery().createWeight(searcher, needsScores);
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}

try {
collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
searcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return new BooleanQuery().createWeight(searcher, needsScores);
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
childWeight = new ChildWeight(this, parentQuery.createWeight(searcher, needsScores), childrenFilter, collector, globalIfd);
releaseCollectorResource = false;
@@ -43,18 +43,18 @@ public class IndexedGeoBoundingBoxQuery {
}

private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery filter = new BooleanQuery();
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.setMinimumNumberShouldMatch(1);
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD);
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true), Occur.SHOULD);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
return new ConstantScoreQuery(filter);
return new ConstantScoreQuery(filter.build());
}

private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery filter = new BooleanQuery();
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
return new ConstantScoreQuery(filter);
return new ConstantScoreQuery(filter.build());
}
}
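The west/east split above is the usual trick for a bounding box that crosses the dateline: two SHOULD clauses on longitude with `setMinimumNumberShouldMatch(1)` express "either range", while latitude stays a MUST. A sketch of the builder translation, where `lonWest`, `lonEast` and `latRange` stand in for the real range queries:

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;

final class WestBox {
    static Query build(Query lonWest, Query lonEast, Query latRange) {
        BooleanQuery.Builder filter = new BooleanQuery.Builder();
        filter.setMinimumNumberShouldMatch(1); // at least one longitude range must match
        filter.add(lonWest, Occur.SHOULD);
        filter.add(lonEast, Occur.SHOULD);
        filter.add(latRange, Occur.MUST);
        return new ConstantScoreQuery(filter.build()); // constant score: it acts as a filter
    }
}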
@@ -19,15 +19,17 @@

package org.elasticsearch.index.search.nested;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BytesRef;

import java.io.IOException;
import java.util.Collection;

@@ -41,7 +43,7 @@ import java.util.Set;
*/
public class IncludeNestedDocsQuery extends Query {

private final BitDocIdSetFilter parentFilter;
private final BitSetProducer parentFilter;
private final Query parentQuery;

// If we are rewritten, this is the original childQuery we

@@ -52,7 +54,7 @@ public class IncludeNestedDocsQuery extends Query {
private final Query origParentQuery;


public IncludeNestedDocsQuery(Query parentQuery, BitDocIdSetFilter parentFilter) {
public IncludeNestedDocsQuery(Query parentQuery, BitSetProducer parentFilter) {
this.origParentQuery = parentQuery;
this.parentQuery = parentQuery;
this.parentFilter = parentFilter;

@@ -82,9 +84,9 @@ public class IncludeNestedDocsQuery extends Query {

private final Query parentQuery;
private final Weight parentWeight;
private final BitDocIdSetFilter parentsFilter;
private final BitSetProducer parentsFilter;

IncludeNestedDocsWeight(Query query, Query parentQuery, Weight parentWeight, BitDocIdSetFilter parentsFilter) {
IncludeNestedDocsWeight(Query query, Query parentQuery, Weight parentWeight, BitSetProducer parentsFilter) {
super(query);
this.parentQuery = parentQuery;
this.parentWeight = parentWeight;

@@ -115,7 +117,7 @@ public class IncludeNestedDocsQuery extends Query {
return null;
}

BitDocIdSet parents = parentsFilter.getDocIdSet(context);
BitSet parents = parentsFilter.getBitSet(context);
if (parents == null) {
// No matches
return null;

@@ -144,10 +146,10 @@ public class IncludeNestedDocsQuery extends Query {
int currentParentPointer = -1;
int currentDoc = -1;

IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitDocIdSet parentBits, int currentParentPointer) {
IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitSet parentBits, int currentParentPointer) {
super(weight);
this.parentScorer = parentScorer;
this.parentBits = parentBits.bits();
this.parentBits = parentBits;
this.currentParentPointer = currentParentPointer;
if (currentParentPointer == 0) {
currentChildPointer = 0;
@@ -19,7 +19,7 @@
package org.elasticsearch.index.shard;

import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;

@@ -34,7 +34,12 @@ import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.DocumentMapperForType;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParsingException;

@@ -222,7 +227,7 @@ public class TranslogRecoveryPerformer {
}

Query aliasFilter = indexAliasesService.aliasFilter(filteringAliases);
BitDocIdSetFilter parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()) : null;
BitSetProducer parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()) : null;
return new Engine.DeleteByQuery(query, source, filteringAliases, aliasFilter, parentFilter, origin, startTime, types);
}
@@ -805,10 +805,10 @@ public class PercolatorService extends AbstractComponent {

final Query filter;
if (context.aliasFilter() != null) {
BooleanQuery booleanFilter = new BooleanQuery();
BooleanQuery.Builder booleanFilter = new BooleanQuery.Builder();
booleanFilter.add(context.aliasFilter(), BooleanClause.Occur.MUST);
booleanFilter.add(percolatorTypeFilter, BooleanClause.Occur.MUST);
filter = booleanFilter;
filter = booleanFilter.build();
} else {
filter = percolatorTypeFilter;
}
@@ -119,9 +119,10 @@ public class AggregationPhase implements SearchPhase {
Query query = Queries.newMatchAllQuery();
Query searchFilter = context.searchFilter(context.types());
if (searchFilter != null) {
BooleanQuery filtered = new BooleanQuery();
filtered.add(query, Occur.MUST);
filtered.add(searchFilter, Occur.FILTER);
BooleanQuery filtered = new BooleanQuery.Builder()
.add(query, Occur.MUST)
.add(searchFilter, Occur.FILTER)
.build();
query = filtered;
}
try {
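The `Occur.FILTER` clause used here constrains matches like MUST but contributes nothing to the score, so only the user query scores. A sketch of the combinator in isolation, assuming plain Lucene 5.3 types and a hypothetical `Filtered` holder:

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;

final class Filtered {
    static Query of(Query query, Query filter) {
        return new BooleanQuery.Builder()
                .add(query, Occur.MUST)     // scoring clause
                .add(filter, Occur.FILTER)  // non-scoring constraint
                .build();
    }
}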
@@ -18,16 +18,16 @@
*/
package org.elasticsearch.search.aggregations.bucket.nested;

import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;

@@ -50,8 +50,8 @@ import java.util.Map;
*/
public class NestedAggregator extends SingleBucketAggregator {

private BitDocIdSetFilter parentFilter;
private final Filter childFilter;
private BitSetProducer parentFilter;
private final Query childFilter;

private DocIdSetIterator childDocs;
private BitSet parentDocs;

@@ -65,13 +65,11 @@ public class NestedAggregator extends SingleBucketAggregator {
public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
// Reset parentFilter, so we resolve the parentDocs for each new segment being searched
this.parentFilter = null;
// In ES if the parent is deleted, then the children are deleted too. Therefore acceptedDocs can also be null here.
DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null);
if (Lucene.isEmpty(childDocIdSet)) {
childDocs = null;
} else {
childDocs = childDocIdSet.iterator();
}
final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx);
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
final Weight weight = searcher.createNormalizedWeight(childFilter, false);
childDocs = weight.scorer(ctx);

return new LeafBucketCollectorBase(sub, null) {
@Override

@@ -91,18 +89,16 @@ public class NestedAggregator extends SingleBucketAggregator {
// Additional NOTE: Before, this logic was performed in the setNextReader(...) method, but the assumption
// that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during
// aggs execution
Filter parentFilterNotCached = findClosestNestedPath(parent());
Query parentFilterNotCached = findClosestNestedPath(parent());
if (parentFilterNotCached == null) {
parentFilterNotCached = Queries.newNonNestedFilter();
}
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);
if (Lucene.isEmpty(parentSet)) {
parentFilter = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilterNotCached);
parentDocs = parentFilter.getBitSet(ctx);
if (parentDocs == null) {
// There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
childDocs = null;
return;
} else {
parentDocs = parentSet.bits();
}
}

@@ -130,7 +126,7 @@ public class NestedAggregator extends SingleBucketAggregator {
return new InternalNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
}

private static Filter findClosestNestedPath(Aggregator parent) {
private static Query findClosestNestedPath(Aggregator parent) {
for (; parent != null; parent = parent.parent()) {
if (parent instanceof NestedAggregator) {
return ((NestedAggregator) parent).childFilter;
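The interesting part of the NestedAggregator change is not the builder but the retrieval of child docs: `Filter.getDocIdSet(ctx, null)` is gone, and the aggregator now builds a top-level Weight and pulls a per-segment Scorer from it (a Scorer is a DocIdSetIterator in Lucene 5.x). The same steps in isolation, as a sketch with a hypothetical `ChildDocs` holder:

import java.io.IOException;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;

final class ChildDocs {
    static DocIdSetIterator iterator(Query childFilter, LeafReaderContext ctx) throws IOException {
        IndexReaderContext topLevel = ReaderUtil.getTopLevelContext(ctx);
        IndexSearcher searcher = new IndexSearcher(topLevel);
        searcher.setQueryCache(null); // consumed once per segment, caching would only add overhead
        Weight weight = searcher.createNormalizedWeight(childFilter, false /* needsScores */);
        return weight.scorer(ctx);    // null when the segment has no child docs
    }
}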
@@ -22,13 +22,10 @@ import com.carrotsearch.hppc.LongIntHashMap;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.AggregationExecutionException;

@@ -52,30 +49,28 @@ import java.util.Map;
*/
public class ReverseNestedAggregator extends SingleBucketAggregator {

private final BitDocIdSetFilter parentFilter;
private final Query parentFilter;
private final BitSetProducer parentBitsetProducer;

public ReverseNestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper,
AggregationContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
if (objectMapper == null) {
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
parentFilter = Queries.newNonNestedFilter();
} else {
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(objectMapper.nestedTypeFilter());
parentFilter = objectMapper.nestedTypeFilter();
}

parentBitsetProducer = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilter);
}

@Override
protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
// In ES if the parent is deleted, then the children are deleted too, so the child docs this agg receives
// must belong to parent docs that are alive. For this reason acceptedDocs can be null here.
BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx);
final BitSet parentDocs;
if (Lucene.isEmpty(docIdSet)) {
final BitSet parentDocs = parentBitsetProducer.getBitSet(ctx);
if (parentDocs == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
} else {
parentDocs = docIdSet.bits();
}
final LongIntHashMap bucketOrdToLastCollectedParentDoc = new LongIntHashMap(32);
return new LeafBucketCollectorBase(sub, null) {

@@ -120,7 +115,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
return new InternalReverseNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
}

Filter getParentFilter() {
Query getParentFilter() {
return parentFilter;
}
@@ -25,7 +25,6 @@ import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesReference;

@@ -192,8 +191,7 @@ public class FetchPhase implements SearchPhase {

private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException {
if (context.mapperService().hasNested()) {
BitDocIdSet nonNested = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()).getDocIdSet(subReaderContext);
BitSet bits = nonNested.bits();
BitSet bits = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()).getBitSet(subReaderContext);
if (!bits.get(subDocId)) {
return bits.nextSetBit(subDocId);
}

@@ -384,8 +382,7 @@ public class FetchPhase implements SearchPhase {
continue;
}

BitDocIdSet parentBitSet = context.bitsetFilterCache().getBitDocIdSetFilter(parentFilter).getDocIdSet(subReaderContext);
BitSet parentBits = parentBitSet.bits();
BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext);

int offset = 0;
int nextParent = parentBits.nextSetBit(currentParent);
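The one-liner replacing the two-step unwrap relies on the block-join doc order: children are indexed immediately before their parent, so the root of a nested doc is the next set bit in the non-nested BitSet at or above its doc id. As a sketch, with `RootDocs.rootOf` a hypothetical helper:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;

final class RootDocs {
    static int rootOf(BitSetProducer nonNested, LeafReaderContext ctx, int subDocId) throws IOException {
        BitSet bits = nonNested.getBitSet(ctx);
        // subDocId is itself a root doc, or the root is the next set bit above it
        return bits.get(subDocId) ? subDocId : bits.nextSetBit(subDocId);
    }
}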
@@ -24,9 +24,8 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;

@@ -117,7 +116,7 @@ public final class InnerHitsContext {
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter);
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
Filter childFilter = childObjectMapper.nestedTypeFilter();
Query q = Queries.filtered(query.query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext));

@@ -147,12 +146,12 @@ public final class InnerHitsContext {
// A filter that only emits the nested children docs of a specific nested parent doc
static class NestedChildrenQuery extends Query {

private final BitDocIdSetFilter parentFilter;
private final BitSetProducer parentFilter;
private final Filter childFilter;
private final int docId;
private final LeafReader leafReader;

NestedChildrenQuery(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
NestedChildrenQuery(BitSetProducer parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
this.parentFilter = parentFilter;
this.childFilter = childFilter;
this.docId = hitContext.docId();

@@ -202,7 +201,7 @@ public final class InnerHitsContext {
return null;
}

final BitSet parents = parentFilter.getDocIdSet(context).bits();
final BitSet parents = parentFilter.getBitSet(context);
final int firstChildDocId = parents.prevSetBit(docId - 1) + 1;
// A parent doc doesn't have child docs, so we can exit early here:
if (firstChildDocId == docId) {

@@ -293,12 +292,13 @@ public final class InnerHitsContext {
return Lucene.EMPTY_TOP_DOCS;
}

BooleanQuery q = new BooleanQuery();
q.add(query.query(), Occur.MUST);
// Only include docs that have the current hit as parent
q.add(new TermQuery(new Term(field, term)), Occur.MUST);
// Only include docs that have this inner hits type
q.add(documentMapper.typeFilter(), Occur.MUST);
BooleanQuery q = new BooleanQuery.Builder()
.add(query.query(), Occur.MUST)
// Only include docs that have the current hit as parent
.add(new TermQuery(new Term(field, term)), Occur.MUST)
// Only include docs that have this inner hits type
.add(documentMapper.typeFilter(), Occur.MUST)
.build();
if (size() == 0) {
final int count = context.searcher().count(q);
return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0);
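NestedChildrenQuery leans on the mirror-image property of the same doc order: the children of the parent at `docId` occupy the gap above the previous parent, that is, `parents.prevSetBit(docId - 1) + 1` up to `docId - 1`, and an empty gap means no children. A sketch of that arithmetic, with `ChildRange` a hypothetical holder:

import org.apache.lucene.util.BitSet;

final class ChildRange {
    // Returns the first child doc id of the parent at parentDocId;
    // equals parentDocId itself when the parent has no children.
    // Assumes parentDocId > 0, as in the source above.
    static int firstChildOf(BitSet parents, int parentDocId) {
        return parents.prevSetBit(parentDocId - 1) + 1;
    }
}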
@@ -197,9 +197,10 @@ public class DefaultSearchContext extends SearchContext {
q.setBoost(query().getBoost());
parsedQuery(new ParsedQuery(q, parsedQuery()));
} else {
BooleanQuery filtered = new BooleanQuery();
filtered.add(query(), Occur.MUST);
filtered.add(searchFilter, Occur.FILTER);
BooleanQuery filtered = new BooleanQuery.Builder()
.add(query(), Occur.MUST)
.add(searchFilter, Occur.FILTER)
.build();
parsedQuery(new ParsedQuery(filtered, parsedQuery()));
}
}

@@ -216,14 +217,14 @@ public class DefaultSearchContext extends SearchContext {
if (filter == null && aliasFilter == null) {
return null;
}
BooleanQuery bq = new BooleanQuery();
BooleanQuery.Builder bq = new BooleanQuery.Builder();
if (filter != null) {
bq.add(filter, Occur.MUST);
}
if (aliasFilter != null) {
bq.add(aliasFilter, Occur.MUST);
}
return new ConstantScoreQuery(bq);
return new ConstantScoreQuery(bq.build());
}

@Override
@@ -187,9 +187,10 @@ public class QueryPhase implements SearchPhase {
// now this gets interesting: since we sort in index-order, we can directly
// skip to the desired doc and stop collecting after ${size} matches
if (scrollContext.lastEmittedDoc != null) {
BooleanQuery bq = new BooleanQuery();
bq.add(query, BooleanClause.Occur.MUST);
bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER);
BooleanQuery bq = new BooleanQuery.Builder()
.add(query, BooleanClause.Occur.MUST)
.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER)
.build();
query = bq;
}
searchContext.terminateAfter(numDocs);
@@ -26,7 +26,7 @@ import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;

@@ -182,7 +182,7 @@ public class GeoDistanceSortParser implements SortParser {
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {

BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
Filter innerDocumentsFilter;
if (nestedHelper.filterFound()) {
// TODO: use queries instead

@@ -213,7 +213,7 @@ public class GeoDistanceSortParser implements SortParser {
if (nested == null) {
selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE);
} else {
final BitSet rootDocs = nested.rootDocs(context).bits();
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSet innerDocs = nested.innerDocs(context);
selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE, rootDocs, innerDocs, context.reader().maxDoc());
}
@@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.lucene.search.Queries;

@@ -145,7 +145,7 @@ public class ScriptSortParser implements SortParser {
// If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource`
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
Filter innerDocumentsFilter;
if (nestedHelper.filterFound()) {
// TODO: use queries instead
@@ -20,11 +20,12 @@
package org.elasticsearch.search.sort;

import com.google.common.collect.ImmutableMap;

import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.join.BitSetProducer;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.search.Queries;

@@ -251,7 +252,7 @@ public class SortParseElement implements SearchParseElement {
}
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
Filter innerDocumentsFilter;
if (nestedHelper.filterFound()) {
// TODO: use queries instead
@@ -97,10 +97,11 @@ public class BlendedTermQueryTests extends ESTestCase {
assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue());
}
{
BooleanQuery query = new BooleanQuery(false);
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.setDisableCoord(true);
query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD);
TopDocs search = searcher.search(query, 1);
TopDocs search = searcher.search(query.build(), 1);
ScoreDoc[] scoreDocs = search.scoreDocs;
assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue());

@@ -150,16 +151,18 @@ public class BlendedTermQueryTests extends ESTestCase {
IndexSearcher searcher = setSimilarity(newSearcher(reader));
{
String[] fields = new String[]{"username", "song"};
BooleanQuery query = new BooleanQuery(false);
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.setDisableCoord(true);
query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f), BooleanClause.Occur.SHOULD);
query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "fighters"), 0.1f), BooleanClause.Occur.SHOULD);
query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD);
TopDocs search = searcher.search(query, 10);
TopDocs search = searcher.search(query.build(), 10);
ScoreDoc[] scoreDocs = search.scoreDocs;
assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue());
}
{
BooleanQuery query = new BooleanQuery(false);
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.setDisableCoord(true);
DisjunctionMaxQuery uname = new DisjunctionMaxQuery(0.0f);
uname.add(new TermQuery(new Term("username", "foo")));
uname.add(new TermQuery(new Term("song", "foo")));

@@ -173,7 +176,7 @@ public class BlendedTermQueryTests extends ESTestCase {
query.add(uname, BooleanClause.Occur.SHOULD);
query.add(s, BooleanClause.Occur.SHOULD);
query.add(gen, BooleanClause.Occur.SHOULD);
TopDocs search = searcher.search(query, 4);
TopDocs search = searcher.search(query.build(), 4);
ScoreDoc[] scoreDocs = search.scoreDocs;
assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue());
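On the test side the migration is mechanical: the `new BooleanQuery(disableCoord)` constructor flag moves to `Builder.setDisableCoord`, and `build()` happens at the call site that hands the query to the searcher. A condensed sketch of the first test block above (field names from the test; the searcher is assumed to be set up elsewhere):

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

final class BlendedQuerySketch {
    static TopDocs searchBothFields(IndexSearcher searcher) throws IOException {
        BooleanQuery.Builder query = new BooleanQuery.Builder();
        query.setDisableCoord(true);
        query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD);
        return searcher.search(query.build(), 1); // build() only when handing off
    }
}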
@@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.cache.bitset;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;

import static org.hamcrest.Matchers.equalTo;

/**
*/
public class BitSetFilterCacheTests extends ESTestCase {

@Test
public void testInvalidateEntries() throws Exception {
IndexWriter writer = new IndexWriter(
new RAMDirectory(),
new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
);
Document document = new Document();
document.add(new StringField("field", "value", Field.Store.NO));
writer.addDocument(document);
writer.commit();

document = new Document();
document.add(new StringField("field", "value", Field.Store.NO));
writer.addDocument(document);
writer.commit();

document = new Document();
document.add(new StringField("field", "value", Field.Store.NO));
writer.addDocument(document);
writer.commit();

IndexReader reader = DirectoryReader.open(writer, false);
IndexSearcher searcher = new IndexSearcher(reader);

BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY);
BitDocIdSetFilter filter = cache.getBitDocIdSetFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "value"))));
TopDocs docs = searcher.search(new ConstantScoreQuery(filter), 1);
assertThat(docs.totalHits, equalTo(3));

// now cached
docs = searcher.search(new ConstantScoreQuery(filter), 1);
assertThat(docs.totalHits, equalTo(3));
// There are 3 segments
assertThat(cache.getLoadedFilters().size(), equalTo(3l));

writer.forceMerge(1);
reader.close();
reader = DirectoryReader.open(writer, false);
searcher = new IndexSearcher(reader);

docs = searcher.search(new ConstantScoreQuery(filter), 1);
assertThat(docs.totalHits, equalTo(3));

// now cached
docs = searcher.search(new ConstantScoreQuery(filter), 1);
assertThat(docs.totalHits, equalTo(3));
// Only one segment now, so the size must be 1
assertThat(cache.getLoadedFilters().size(), equalTo(1l));

reader.close();
writer.close();
// There is no reference from readers and writer to any segment in the test index, so the size in the fbs cache must be 0
assertThat(cache.getLoadedFilters().size(), equalTo(0l));
}

}
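The deleted test only exercised the cache through the removed BitDocIdSetFilter entry point, which could be wrapped in a ConstantScoreQuery and searched directly; a BitSetProducer cannot. A hypothetical equivalent against the new API would drive the cache per segment instead; `cache` and the leaves would come from the same test setup as before:

import java.io.IOException;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;

final class BitsetCacheSketch {
    // Hypothetical replacement assertion helper: total hits across segments,
    // loading (and caching) one BitSet per leaf via getBitSetProducer.
    static long countHits(BitsetFilterCache cache, List<LeafReaderContext> leaves) throws IOException {
        BitSetProducer producer = cache.getBitSetProducer(
                new QueryWrapperFilter(new TermQuery(new Term("field", "value"))));
        long hits = 0;
        for (LeafReaderContext leaf : leaves) {
            BitSet bits = producer.getBitSet(leaf);
            if (bits != null) {
                hits += bits.cardinality();
            }
        }
        return hits;
    }
}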
@@ -130,7 +130,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {

protected Nested createNested(Filter parentFilter, Filter childFilter) {
BitsetFilterCache s = indexService.bitsetFilterCache();
return new Nested(s.getBitDocIdSetFilter(parentFilter), s.getBitDocIdSetFilter(childFilter));
return new Nested(s.getBitSetProducer(parentFilter), childFilter);
}

public void testEmpty() throws Exception {
@@ -834,7 +834,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), boolQuery().must(termQuery("name.first", "shay1")).must(termQuery("name.first", "shay4")).mustNot(termQuery("name.first", "shay2")).should(termQuery("name.first", "shay3")))).query();

BooleanQuery filter = new BooleanQuery();
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
filter.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
filter.add(new TermQuery(new Term("name.first", "shay2")), Occur.MUST_NOT);

@@ -842,7 +842,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
filter.setMinimumNumberShouldMatch(1);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
filter);
filter.build());
assertEquals(expected, parsedQuery);
}

@@ -851,7 +851,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool-filter.json");
Query parsedQuery = queryParser.parse(query).query();
BooleanQuery filter = new BooleanQuery();
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
filter.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
filter.add(new TermQuery(new Term("name.first", "shay2")), Occur.MUST_NOT);

@@ -859,7 +859,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
filter.setMinimumNumberShouldMatch(1);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
filter);
filter.build());
assertEquals(expected, parsedQuery);
}

@@ -867,12 +867,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
public void testAndFilteredQueryBuilder() throws IOException {
IndexQueryParserService queryParser = queryParser();
Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), andQuery(termQuery("name.first", "shay1"), termQuery("name.first", "shay4")))).query();
BooleanQuery and = new BooleanQuery();
BooleanQuery.Builder and = new BooleanQuery.Builder();
and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST));
builder.add(new BooleanClause(and, Occur.FILTER));
builder.add(new MatchAllDocsQuery(), Occur.MUST);
builder.add(and.build(), Occur.FILTER);
assertEquals(builder.build(), parsedQuery);
}

@@ -881,12 +881,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter.json");
Query parsedQuery = queryParser.parse(query).query();
BooleanQuery and = new BooleanQuery();
BooleanQuery.Builder and = new BooleanQuery.Builder();
and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
and);
and.build());
assertEquals(expected, parsedQuery);
}

@@ -895,12 +895,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter-named.json");
Query parsedQuery = queryParser.parse(query).query();
BooleanQuery and = new BooleanQuery();
BooleanQuery.Builder and = new BooleanQuery.Builder();
and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
and);
and.build());
assertEquals(expected, parsedQuery);
}

@@ -909,12 +909,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter2.json");
Query parsedQuery = queryParser.parse(query).query();
BooleanQuery and = new BooleanQuery();
BooleanQuery.Builder and = new BooleanQuery.Builder();
and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
and);
and.build());
assertEquals(expected, parsedQuery);
}

@@ -922,12 +922,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
public void testOrFilteredQueryBuilder() throws IOException {
IndexQueryParserService queryParser = queryParser();
Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), orQuery(termQuery("name.first", "shay1"), termQuery("name.first", "shay4")))).query();
BooleanQuery or = new BooleanQuery();
BooleanQuery.Builder or = new BooleanQuery.Builder();
or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD);
or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD);
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new MatchAllDocsQuery(), Occur.MUST);
builder.add(or, Occur.FILTER);
builder.add(or.build(), Occur.FILTER);
assertEquals(builder.build(), parsedQuery);
}

@@ -936,12 +936,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter.json");
Query parsedQuery = queryParser.parse(query).query();
BooleanQuery or = new BooleanQuery();
BooleanQuery.Builder or = new BooleanQuery.Builder();
or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD);
or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
or);
or.build());
assertEquals(expected, parsedQuery);
}

@@ -950,12 +950,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter2.json");
Query parsedQuery = queryParser.parse(query).query();
BooleanQuery or = new BooleanQuery();
BooleanQuery.Builder or = new BooleanQuery.Builder();
or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD);
or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD);
Query expected = Queries.filtered(
new TermQuery(new Term("name.first", "shay")),
or);
or.build());
assertEquals(expected, parsedQuery);
}

@@ -2520,14 +2520,14 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) {
Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery);

BooleanQuery expected = new BooleanQuery();
BooleanQuery.Builder expected = new BooleanQuery.Builder();
expected.add(new TermQuery(new Term("foobar", "banon")), Occur.SHOULD);
TermQuery tq1 = new TermQuery(new Term("name.first", "banon"));
tq1.setBoost(2);
TermQuery tq2 = new TermQuery(new Term("name.last", "banon"));
tq2.setBoost(3);
expected.add(new DisjunctionMaxQuery(Arrays.<Query>asList(tq1, tq2), 0f), Occur.SHOULD);
assertEquals(expected, rewrittenQuery);
assertEquals(expected.build(), rewrittenQuery);
}
}
@@ -19,14 +19,19 @@

package org.elasticsearch.index.search.child;

import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -130,8 +135,8 @@ public abstract class AbstractChildTestCase extends ESSingleNodeTestCase {
}
}

static BitDocIdSetFilter wrapWithBitSetFilter(Filter filter) {
return SearchContext.current().bitsetFilterCache().getBitDocIdSetFilter(filter);
static BitSetProducer wrapWithBitSetFilter(Filter filter) {
return SearchContext.current().bitsetFilterCache().getBitSetProducer(filter);
}

static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
@@ -20,13 +20,28 @@ package org.elasticsearch.index.search.child;

import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectObjectHashMap;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

@@ -51,7 +66,11 @@ import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeSet;

import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.hamcrest.Matchers.equalTo;

public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {

@@ -73,7 +92,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
Query childQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
}

@@ -106,7 +125,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
);

TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
int shortCircuitParentDocSet = random().nextInt(5);
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
@@ -22,6 +22,7 @@ import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.randomizedtesting.generators.RandomInts;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;

@@ -29,7 +30,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

@@ -78,7 +78,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
int minChildren = random().nextInt(10);
int maxChildren = scaledRandomIntBetween(minChildren, 10);
Query query = new ChildrenQuery(parentChildIndexFieldData, "parent", "child", parentFilter, childQuery, scoreType, minChildren,
@@ -20,13 +20,13 @@ package org.elasticsearch.index.search.child;

import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

@@ -73,7 +73,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
Query parentQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))));
Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")));
Query query = new ParentConstantScoreQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
QueryUtils.check(query);
}
@@ -21,13 +21,13 @@ package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

@@ -73,7 +73,7 @@ public class ParentQueryTests extends AbstractChildTestCase {
Query parentQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))));
Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")));
Query query = new ParentQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
QueryUtils.check(query);
}
@@ -322,10 +322,10 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
         assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("g"));


-        BooleanQuery bq = new BooleanQuery();
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
         bq.add(parentFilter, Occur.MUST_NOT);
         bq.add(new TermQuery(new Term("filter_1", "T")), Occur.MUST);
-        childFilter = new QueryWrapperFilter(bq);
+        childFilter = new QueryWrapperFilter(bq.build());
         nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(parentFilter, childFilter));
         query = new ToParentBlockJoinQuery(
                 new FilteredQuery(new MatchAllDocsQuery(), childFilter),
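The change above is the BooleanQuery-to-builder migration that recurs throughout this commit: clauses are accumulated on a BooleanQuery.Builder and build() produces the immutable query. A minimal sketch of the pattern, assuming Lucene 5.3; the terms are illustrative:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class BooleanBuilderSketch {
    static Query newFilterQuery() {
        // Clauses go onto the builder; once build() is called the resulting
        // BooleanQuery is immutable, so no further add() is possible.
        BooleanQuery.Builder bq = new BooleanQuery.Builder();
        bq.add(new TermQuery(new Term("filter_1", "T")), Occur.MUST);
        bq.add(new TermQuery(new Term("docType", "root")), Occur.MUST_NOT);
        return bq.build();
    }
}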
@@ -131,10 +131,10 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase {
         // A regular search always exclude nested docs, so we use NonNestedDocsFilter.INSTANCE here (otherwise MatchAllDocsQuery would be sufficient)
         // We exclude root doc with uid type#2, this will trigger the bug if we don't reset the root doc when we process a new segment, because
         // root doc type#3 and root doc type#1 have the same segment docid
-        BooleanQuery bq = new BooleanQuery();
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
         bq.add(Queries.newNonNestedFilter(), Occur.MUST);
         bq.add(new TermQuery(new Term(UidFieldMapper.NAME, "type#2")), Occur.MUST_NOT);
-        searcher.search(new ConstantScoreQuery(bq), collector);
+        searcher.search(new ConstantScoreQuery(bq.build()), collector);
         collector.postCollection();

         Nested nested = (Nested) aggs[0].buildAggregation(0);
@@ -34,8 +34,8 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TotalHitCountCollector;
-import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
+import org.apache.lucene.search.join.QueryBitSetProducer;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenQuery;
@@ -79,11 +79,11 @@ public class NestedChildrenFilterTests extends ESTestCase {

         IndexSearcher searcher = new IndexSearcher(reader);
         FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
-        BitDocIdSetFilter parentFilter = new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("type", "parent"))));
+        BitSetProducer parentFilter = new QueryBitSetProducer(new TermQuery(new Term("type", "parent")));
         Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("type", "child")));
         int checkedParents = 0;
         for (LeafReaderContext leaf : reader.leaves()) {
-            DocIdSetIterator parents = parentFilter.getDocIdSet(leaf).iterator();
+            DocIdSetIterator parents = new QueryWrapperFilter(new TermQuery(new Term("type", "parent"))).getDocIdSet(leaf, null).iterator();
             for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) {
                 int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue();
                 hitContext.reset(null, leaf, parentDoc, searcher);
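In the hunk above, QueryBitSetProducer replaces BitDocIdSetCachingWrapperFilter as the per-segment source of parent bit sets. A minimal sketch of consuming a BitSetProducer, assuming Lucene 5.3 and an already-open IndexReader; note that getBitSet may return null for a segment with no matching documents:

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.QueryBitSetProducer;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;

public class BitSetProducerSketch {
    static void visitParents(IndexReader reader) throws IOException {
        BitSetProducer parents = new QueryBitSetProducer(new TermQuery(new Term("type", "parent")));
        for (LeafReaderContext leaf : reader.leaves()) {
            BitSet bits = parents.getBitSet(leaf); // null when the segment has no parent docs
            if (bits == null) {
                continue;
            }
            DocIdSetIterator it = new BitSetIterator(bits, bits.approximateCardinality());
            for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
                // doc is a segment-local parent document id
            }
        }
    }
}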
@@ -96,9 +96,10 @@ public class QueryPhaseTests extends ESTestCase {
         Query matchAllCsq = new ConstantScoreQuery(matchAll);
         Query tq = new TermQuery(new Term("foo", "bar"));
         Query tCsq = new ConstantScoreQuery(tq);
-        BooleanQuery bq = new BooleanQuery();
-        bq.add(matchAll, Occur.SHOULD);
-        bq.add(tq, Occur.MUST);
+        BooleanQuery bq = new BooleanQuery.Builder()
+            .add(matchAll, Occur.SHOULD)
+            .add(tq, Occur.MUST)
+            .build();

         countTestCase(matchAll, reader, false);
         countTestCase(matchAllCsq, reader, false);
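Since this test exercises the query-phase count path, the fluently built query can equivalently be counted directly. A minimal sketch, assuming Lucene 5.3 and an open IndexReader; IndexSearcher#count is existing Lucene API, everything else here is illustrative:

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;

public class CountSketch {
    static int countHits(IndexReader reader) throws IOException {
        // The builder chains fluently; build() is the final call and the
        // resulting BooleanQuery can no longer be modified.
        BooleanQuery bq = new BooleanQuery.Builder()
            .add(new MatchAllDocsQuery(), Occur.SHOULD)
            .add(new TermQuery(new Term("foo", "bar")), Occur.MUST)
            .build();
        return new IndexSearcher(reader).count(bq);
    }
}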