Merge pull request #11224 from jpountz/enhancement/filtered_query_forbidden

Search: Make FilteredQuery a forbidden API.
This commit is contained in:
Adrien Grand 2015-05-19 15:35:24 +02:00
commit 52899c87ef
31 changed files with 223 additions and 328 deletions

View File

@ -131,3 +131,7 @@ java.util.concurrent.Future#cancel(boolean)
@defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead @defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead
org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[]) org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
org.elasticsearch.common.io.PathUtils#get(java.net.URI) org.elasticsearch.common.io.PathUtils#get(java.net.URI)
@defaultMessage Use queries, not filters
org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter)
org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter, org.apache.lucene.search.FilteredQuery$FilterStrategy)

View File

@ -39,6 +39,8 @@ import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.Collector; import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
@ -52,11 +54,13 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.Lock; import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
@ -795,4 +799,74 @@ public class Lucene {
throw new UnsupportedOperationException("This IndexCommit does not support deletions"); throw new UnsupportedOperationException("This IndexCommit does not support deletions");
} }
} }
/**
 * Checks whether the given {@link DocIdSet} cannot match any document,
 * i.e. it is either {@code null} or the shared {@link DocIdSet#EMPTY}
 * singleton instance.
 */
public static boolean isEmpty(@Nullable DocIdSet set) {
    if (set == null) {
        return true;
    }
    // EMPTY is a shared singleton, so an identity comparison is sufficient.
    return set == DocIdSet.EMPTY;
}
/**
 * Given a {@link Scorer}, return a {@link Bits} instance that will match
 * all documents contained in the set. Note that the returned {@link Bits}
 * instance MUST be consumed in order.
 *
 * @param maxDoc the number of documents in the segment; defines {@link Bits#length()}
 * @param scorer the scorer whose matches should be exposed as bits, or
 *               {@code null}, in which case no document matches
 * @return bits over the scorer's matching documents; indices must be queried
 *         in non-decreasing order
 */
public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException {
    if (scorer == null) {
        // no scorer -> no document matches
        return new Bits.MatchNoBits(maxDoc);
    }
    // If the scorer supports two-phase iteration, drive the cheap approximation
    // and confirm candidate hits with matches(); otherwise the scorer itself is
    // the exact iterator.
    final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
    final DocIdSetIterator iterator;
    if (twoPhase == null) {
        iterator = scorer;
    } else {
        iterator = twoPhase.approximation();
    }
    return new Bits() {
        // Last index passed to get(), and whether it matched. Tracked both to
        // enforce in-order consumption and to serve repeated calls on the same
        // doc from cache.
        int previous = -1;
        boolean previousMatched = false;

        @Override
        public boolean get(int index) {
            if (index < 0 || index >= maxDoc) {
                throw new IndexOutOfBoundsException(index + " is out of bounds: [" + 0 + "-" + maxDoc + "[");
            }
            if (index < previous) {
                throw new IllegalArgumentException("This Bits instance can only be consumed in order. "
                        + "Got called on [" + index + "] while previously called on [" + previous + "]");
            }
            if (index == previous) {
                // we cache whether it matched because it is illegal to call
                // twoPhase.matches() twice
                return previousMatched;
            }
            previous = index;

            int doc = iterator.docID();
            if (doc < index) {
                // iterator is behind the requested doc: advance it
                try {
                    doc = iterator.advance(index);
                } catch (IOException e) {
                    throw new IllegalStateException("Cannot advance iterator", e);
                }
            }
            if (index == doc) {
                try {
                    // a two-phase approximation may over-match, so confirm with
                    // matches(); a plain iterator is already exact
                    return previousMatched = twoPhase == null || twoPhase.matches();
                } catch (IOException e) {
                    throw new IllegalStateException("Cannot validate match", e);
                }
            }
            // iterator skipped past index (or is exhausted): no match
            return previousMatched = false;
        }

        @Override
        public int length() {
            return maxDoc;
        }
    };
}
} }

View File

@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.docset;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.Nullable;
import java.io.IOException;
/**
 * Utility methods for working with {@link DocIdSet}s and for exposing a
 * {@link Scorer}'s matches through the {@link Bits} interface.
 */
public class DocIdSets {

    /**
     * Is it an empty {@link DocIdSet}?
     */
    public static boolean isEmpty(@Nullable DocIdSet set) {
        // DocIdSet.EMPTY is a shared singleton, so identity comparison suffices.
        return set == null || set == DocIdSet.EMPTY;
    }

    /**
     * Given a {@link Scorer}, return a {@link Bits} instance that will match
     * all documents contained in the set. Note that the returned {@link Bits}
     * instance MUST be consumed in order.
     *
     * @param maxDoc the number of documents in the segment; defines {@link Bits#length()}
     * @param scorer the scorer whose matches should be exposed as bits, or
     *               {@code null}, in which case no document matches
     * @return bits over the scorer's matching documents; indices must be
     *         queried in non-decreasing order
     */
    public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException {
        if (scorer == null) {
            // no scorer -> no document matches
            return new Bits.MatchNoBits(maxDoc);
        }
        // If the scorer supports two-phase iteration, drive the cheap
        // approximation and confirm candidates with matches(); otherwise the
        // scorer itself is the exact iterator.
        final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
        final DocIdSetIterator iterator;
        if (twoPhase == null) {
            iterator = scorer;
        } else {
            iterator = twoPhase.approximation();
        }
        return new Bits() {
            // Last index passed to get(), and whether it matched. Tracked both
            // to enforce in-order consumption and to serve repeated calls on
            // the same doc from cache.
            int previous = -1;
            boolean previousMatched = false;

            @Override
            public boolean get(int index) {
                if (index < 0 || index >= maxDoc) {
                    throw new IndexOutOfBoundsException(index + " is out of bounds: [" + 0 + "-" + maxDoc + "[");
                }
                if (index < previous) {
                    throw new IllegalArgumentException("This Bits instance can only be consumed in order. "
                            + "Got called on [" + index + "] while previously called on [" + previous + "]");
                }
                if (index == previous) {
                    // we cache whether it matched because it is illegal to call
                    // twoPhase.matches() twice
                    return previousMatched;
                }
                previous = index;

                int doc = iterator.docID();
                if (doc < index) {
                    // iterator is behind the requested doc: advance it
                    try {
                        doc = iterator.advance(index);
                    } catch (IOException e) {
                        throw new IllegalStateException("Cannot advance iterator", e);
                    }
                }
                if (index == doc) {
                    try {
                        // a two-phase approximation may over-match, so confirm
                        // with matches(); a plain iterator is already exact
                        return previousMatched = twoPhase == null || twoPhase.matches();
                    } catch (IOException e) {
                        throw new IllegalStateException("Cannot validate match", e);
                    }
                }
                // iterator skipped past index (or is exhausted): no match
                return previousMatched = false;
            }

            @Override
            public int length() {
                return maxDoc;
            }
        };
    }
}

View File

@ -19,9 +19,13 @@
package org.elasticsearch.common.lucene.search; package org.elasticsearch.common.lucene.search;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.*; import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FilterLeafCollector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import java.io.IOException; import java.io.IOException;
@ -42,7 +46,7 @@ public class FilteredCollector implements Collector {
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
final Scorer filterScorer = filter.scorer(context, null); final Scorer filterScorer = filter.scorer(context, null);
final LeafCollector in = collector.getLeafCollector(context); final LeafCollector in = collector.getLeafCollector(context);
final Bits bits = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), filterScorer); final Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
return new FilterLeafCollector(in) { return new FilterLeafCollector(in) {
@Override @Override

View File

@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import java.io.IOException;
/**
 * A filter implementation that resolves details at the last possible moment
 * between filter parsing and execution. For example a date filter based on
 * 'now'.
 */
public abstract class ResolvableFilter extends Filter {

    /**
     * @return The actual filter instance to be executed containing the latest details.
     */
    public abstract Filter resolve();

    @Override
    public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
        // Delegate to the freshly resolved filter; an unresolved filter
        // produces no doc id set at all.
        final Filter delegate = resolve();
        return delegate == null ? null : delegate.getDocIdSet(context, acceptDocs);
    }

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        // Rewrite to the concrete filter when available, otherwise fall back
        // to the default rewrite behavior.
        final Filter concrete = resolve();
        return concrete == null ? super.rewrite(reader) : concrete;
    }
}

View File

@ -19,16 +19,24 @@
package org.elasticsearch.common.lucene.search.function; package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.*; import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils; import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import java.io.IOException; import java.io.IOException;
import java.util.*; import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Set;
/** /**
* A query that allows for a pluggable boost function / filter. If it matches * A query that allows for a pluggable boost function / filter. If it matches
@ -169,7 +177,7 @@ public class FiltersFunctionScoreQuery extends Query {
FilterFunction filterFunction = filterFunctions[i]; FilterFunction filterFunction = filterFunctions[i];
functions[i] = filterFunction.function.getLeafScoreFunction(context); functions[i] = filterFunction.function.getLeafScoreFunction(context);
Scorer filterScorer = filterWeights[i].scorer(context, null); // no need to apply accepted docs Scorer filterScorer = filterWeights[i].scorer(context, null); // no need to apply accepted docs
docSets[i] = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), filterScorer); docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
} }
return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore); return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore);
} }
@ -193,7 +201,7 @@ public class FiltersFunctionScoreQuery extends Query {
weightSum++; weightSum++;
} }
Bits docSet = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), Bits docSet = Lucene.asSequentialAccessBits(context.reader().maxDoc(),
filterWeights[i].scorer(context, null)); filterWeights[i].scorer(context, null));
if (docSet.get(doc)) { if (docSet.get(doc)) {
Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl); Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);

View File

@ -35,7 +35,7 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchGenerationException;
@ -372,11 +372,11 @@ public class MapperService extends AbstractIndexComponent {
BooleanQuery bq = new BooleanQuery(); BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT); bq.add(percolatorType, Occur.MUST_NOT);
bq.add(Queries.newNonNestedFilter(), Occur.MUST); bq.add(Queries.newNonNestedFilter(), Occur.MUST);
return new QueryWrapperFilter(bq); return new ConstantScoreQuery(bq);
} else if (hasNested) { } else if (hasNested) {
return Queries.newNonNestedFilter(); return Queries.newNonNestedFilter();
} else if (filterPercolateType) { } else if (filterPercolateType) {
return new QueryWrapperFilter(Queries.not(percolatorType)); return new ConstantScoreQuery(Queries.not(percolatorType));
} else { } else {
return null; return null;
} }
@ -390,7 +390,7 @@ public class MapperService extends AbstractIndexComponent {
BooleanQuery bq = new BooleanQuery(); BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT); bq.add(percolatorType, Occur.MUST_NOT);
bq.add(filter, Occur.MUST); bq.add(filter, Occur.MUST);
return new QueryWrapperFilter(bq); return new ConstantScoreQuery(bq);
} else { } else {
return filter; return filter;
} }
@ -420,9 +420,9 @@ public class MapperService extends AbstractIndexComponent {
BooleanQuery bq = new BooleanQuery(); BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT); bq.add(percolatorType, Occur.MUST_NOT);
bq.add(termsFilter, Occur.MUST); bq.add(termsFilter, Occur.MUST);
return new QueryWrapperFilter(bq); return new ConstantScoreQuery(bq);
} else { } else {
return new QueryWrapperFilter(termsFilter); return termsFilter;
} }
} else { } else {
// Current bool filter requires that at least one should clause matches, even with a must clause. // Current bool filter requires that at least one should clause matches, even with a must clause.
@ -442,7 +442,7 @@ public class MapperService extends AbstractIndexComponent {
bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
} }
return new QueryWrapperFilter(bool); return new ConstantScoreQuery(bool);
} }
} }

View File

@ -23,8 +23,8 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Booleans;
@ -37,8 +37,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.index.similarity.SimilarityProvider;
@ -201,11 +201,11 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(new TermQuery(names().createIndexNameTerm(nullValue ? Values.TRUE : Values.FALSE))); return new ConstantScoreQuery(termQuery(nullValue, null));
} }
@Override @Override

View File

@ -24,10 +24,9 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -208,14 +207,11 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(NumericRangeQuery.newIntRange(names.indexName(), precisionStep, return new ConstantScoreQuery(termQuery(nullValue, null));
nullValue.intValue(),
nullValue.intValue(),
true, true));
} }
@Override @Override

View File

@ -24,6 +24,7 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
@ -335,15 +336,11 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
long value = parseStringValue(nullValue); return new ConstantScoreQuery(termQuery(nullValue, null));
return new QueryWrapperFilter(NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
value,
value,
true, true));
} }

View File

@ -28,10 +28,9 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -198,19 +197,16 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
includeLower, includeUpper); includeLower, includeUpper);
} }
public Filter rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) { public Query rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) {
return new QueryWrapperFilter(NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper)); return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper);
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, return new ConstantScoreQuery(termQuery(nullValue, null));
nullValue,
nullValue,
true, true));
} }
@Override @Override

View File

@ -28,10 +28,9 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -209,14 +208,11 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(NumericRangeQuery.newFloatRange(names.indexName(), precisionStep, return new ConstantScoreQuery(termQuery(nullValue, null));
nullValue,
nullValue,
true, true));
} }
@Override @Override

View File

@ -25,10 +25,9 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -203,14 +202,11 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(NumericRangeQuery.newIntRange(names.indexName(), precisionStep, return new ConstantScoreQuery(termQuery(nullValue, null));
nullValue,
nullValue,
true, true));
} }
@Override @Override

View File

@ -25,10 +25,9 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -193,14 +192,11 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(NumericRangeQuery.newLongRange(names.indexName(), precisionStep, return new ConstantScoreQuery(termQuery(nullValue, null));
nullValue,
nullValue,
true, true));
} }
@Override @Override

View File

@ -25,10 +25,9 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -209,14 +208,11 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
return new QueryWrapperFilter(NumericRangeQuery.newIntRange(names.indexName(), precisionStep, return new ConstantScoreQuery(termQuery(nullValue, null));
nullValue.intValue(),
nullValue.intValue(),
true, true));
} }
@Override @Override

View File

@ -25,10 +25,9 @@ import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -250,15 +249,11 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
} }
@Override @Override
public Filter nullValueFilter() { public Query nullValueFilter() {
if (nullValue == null) { if (nullValue == null) {
return null; return null;
} }
final long value = ipToLong(nullValue); return new ConstantScoreQuery(termQuery(nullValue, null));
return new QueryWrapperFilter(NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
value,
value,
true, true));
} }
@Override @Override

View File

@ -187,7 +187,7 @@ public class HasParentQueryParser implements QueryParser {
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD); parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
} }
} }
parentFilter = new QueryWrapperFilter(parentsFilter); parentFilter = parentsFilter;
} }
if (parentFilter == null) { if (parentFilter == null) {

View File

@ -22,9 +22,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
@ -112,7 +110,7 @@ public class MissingQueryParser implements QueryParser {
return null; return null;
} }
Filter existenceFilter = null; Query existenceFilter = null;
Query nullFilter = null; Query nullFilter = null;
if (existence) { if (existence) {
@ -139,8 +137,8 @@ public class MissingQueryParser implements QueryParser {
boolFilter.add(filter, BooleanClause.Occur.SHOULD); boolFilter.add(filter, BooleanClause.Occur.SHOULD);
} }
existenceFilter = new QueryWrapperFilter(boolFilter); existenceFilter = boolFilter;
existenceFilter = new QueryWrapperFilter(Queries.not(existenceFilter));; existenceFilter = Queries.not(existenceFilter);;
} }
if (nullValue) { if (nullValue) {

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.apache.lucene.search.join.ToParentBlockJoinQuery;
@ -29,6 +28,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
@ -155,7 +155,7 @@ public class NestedQueryParser implements QueryParser {
} }
if (innerQuery != null) { if (innerQuery != null) {
return new ToParentBlockJoinQuery(new FilteredQuery(innerQuery, childFilter), parentFilter, scoreMode); return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode);
} else { } else {
return null; return null;
} }

View File

@ -39,7 +39,7 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet; import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery; import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector; import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
@ -208,7 +208,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
if (shortCircuitFilter != null) { if (shortCircuitFilter != null) {
DocIdSet docIdSet = shortCircuitFilter.getDocIdSet(context, acceptDocs); DocIdSet docIdSet = shortCircuitFilter.getDocIdSet(context, acceptDocs);
if (!DocIdSets.isEmpty(docIdSet)) { if (!Lucene.isEmpty(docIdSet)) {
DocIdSetIterator iterator = docIdSet.iterator(); DocIdSetIterator iterator = docIdSet.iterator();
if (iterator != null) { if (iterator != null) {
return ConstantScorer.create(iterator, this, queryWeight); return ConstantScorer.create(iterator, this, queryWeight);
@ -218,7 +218,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
} }
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs); DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
if (!DocIdSets.isEmpty(parentDocIdSet)) { if (!Lucene.isEmpty(parentDocIdSet)) {
// We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining" // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
// count down (short circuit) logic will then work as expected. // count down (short circuit) logic will then work as expected.
parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs()); parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());

View File

@ -37,11 +37,10 @@ import org.apache.lucene.search.XFilteredDocIdSetIterator;
import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils; import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.IndexCacheableQuery; import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector; import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray; import org.elasticsearch.common.util.FloatArray;
@ -268,7 +267,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
@Override @Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
DocIdSet parentsSet = parentFilter.getDocIdSet(context, acceptDocs); DocIdSet parentsSet = parentFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(parentsSet) || remaining == 0) { if (Lucene.isEmpty(parentsSet) || remaining == 0) {
return null; return null;
} }

View File

@ -35,7 +35,7 @@ import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet; import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery; import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector; import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
@ -176,7 +176,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
@Override @Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs); DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(childrenDocIdSet)) { if (Lucene.isEmpty(childrenDocIdSet)) {
return null; return null;
} }

View File

@ -39,7 +39,7 @@ import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.IndexCacheableQuery; import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector; import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray; import org.elasticsearch.common.util.FloatArray;
@ -248,7 +248,7 @@ public class ParentQuery extends IndexCacheableQuery {
@Override @Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, acceptDocs); DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(childrenDocSet)) { if (Lucene.isEmpty(childrenDocSet)) {
return null; return null;
} }
final DocIdSetIterator childIterator = childrenDocSet.iterator(); final DocIdSetIterator childIterator = childrenDocSet.iterator();

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.search.geo; package org.elasticsearch.index.search.geo;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.ConstantScoreWeight;
@ -125,11 +126,16 @@ public class GeoDistanceRangeQuery extends Query {
return indexFieldData.getFieldNames().indexName(); return indexFieldData.getFieldNames().indexName();
} }
@Override
public Query rewrite(IndexReader reader) throws IOException {
return super.rewrite(reader);
}
@Override @Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight boundingBoxWeight; final Weight boundingBoxWeight;
if (boundingBoxFilter != null) { if (boundingBoxFilter != null) {
boundingBoxWeight = boundingBoxFilter.createWeight(searcher, false); boundingBoxWeight = searcher.createNormalizedWeight(boundingBoxFilter, false);
} else { } else {
boundingBoxWeight = null; boundingBoxWeight = null;
} }

View File

@ -26,7 +26,7 @@ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.common.util.LongObjectPagedHashMap;
import org.elasticsearch.index.search.child.ConstantScorer; import org.elasticsearch.index.search.child.ConstantScorer;
@ -110,7 +110,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx); final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
assert globalOrdinals != null; assert globalOrdinals != null;
Scorer parentScorer = parentFilter.scorer(ctx, null); Scorer parentScorer = parentFilter.scorer(ctx, null);
final Bits parentDocs = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer); final Bits parentDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer);
if (childFilter.scorer(ctx, null) != null) { if (childFilter.scorer(ctx, null) != null) {
replay.add(ctx); replay.add(ctx);
} }

View File

@ -22,7 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -58,7 +58,7 @@ public class FilterAggregator extends SingleBucketAggregator {
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException { final LeafBucketCollector sub) throws IOException {
// no need to provide deleted docs to the filter // no need to provide deleted docs to the filter
final Bits bits = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorer(ctx, null)); final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorer(ctx, null));
return new LeafBucketCollectorBase(sub, null) { return new LeafBucketCollectorBase(sub, null) {
@Override @Override
public void collect(int doc, long bucket) throws IOException { public void collect(int doc, long bucket) throws IOException {

View File

@ -25,8 +25,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -82,7 +81,7 @@ public class FiltersAggregator extends BucketsAggregator {
// no need to provide deleted docs to the filter // no need to provide deleted docs to the filter
final Bits[] bits = new Bits[filters.length]; final Bits[] bits = new Bits[filters.length];
for (int i = 0; i < filters.length; ++i) { for (int i = 0; i < filters.length; ++i) {
bits[i] = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx, null)); bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx, null));
} }
return new LeafBucketCollectorBase(sub, null) { return new LeafBucketCollectorBase(sub, null) {
@Override @Override

View File

@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
@ -67,7 +67,7 @@ public class NestedAggregator extends SingleBucketAggregator {
this.parentFilter = null; this.parentFilter = null;
// In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also null here. // In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also null here.
DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null); DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null);
if (DocIdSets.isEmpty(childDocIdSet)) { if (Lucene.isEmpty(childDocIdSet)) {
childDocs = null; childDocs = null;
} else { } else {
childDocs = childDocIdSet.iterator(); childDocs = childDocIdSet.iterator();
@ -97,7 +97,7 @@ public class NestedAggregator extends SingleBucketAggregator {
} }
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached); parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx); BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);
if (DocIdSets.isEmpty(parentSet)) { if (Lucene.isEmpty(parentSet)) {
// There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations. // There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
childDocs = null; childDocs = null;
return; return;

View File

@ -26,7 +26,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
@ -72,7 +72,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
// must belong to parent docs that is alive. For this reason acceptedDocs can be null here. // must belong to parent docs that is alive. For this reason acceptedDocs can be null here.
BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx); BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx);
final BitSet parentDocs; final BitSet parentDocs;
if (DocIdSets.isEmpty(docIdSet)) { if (Lucene.isEmpty(docIdSet)) {
return LeafBucketCollector.NO_OP_COLLECTOR; return LeafBucketCollector.NO_OP_COLLECTOR;
} else { } else {
parentDocs = docIdSet.bits(); parentDocs = docIdSet.bits();

View File

@ -26,16 +26,20 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
@ -132,7 +136,7 @@ public final class InnerHitsContext {
} }
BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter); BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter);
Filter childFilter = childObjectMapper.nestedTypeFilter(); Filter childFilter = childObjectMapper.nestedTypeFilter();
Query q = new FilteredQuery(query, new NestedChildrenFilter(parentFilter, childFilter, hitContext)); Query q = Queries.filtered(query, new NestedChildrenQuery(parentFilter, childFilter, hitContext));
if (size() == 0) { if (size() == 0) {
return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0); return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0);
@ -154,18 +158,18 @@ public final class InnerHitsContext {
} }
// A filter that only emits the nested children docs of a specific nested parent doc // A filter that only emits the nested children docs of a specific nested parent doc
static class NestedChildrenFilter extends Filter { static class NestedChildrenQuery extends Query {
private final BitDocIdSetFilter parentFilter; private final BitDocIdSetFilter parentFilter;
private final Filter childFilter; private final Filter childFilter;
private final int docId; private final int docId;
private final LeafReader atomicReader; private final LeafReader leafReader;
NestedChildrenFilter(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) { NestedChildrenQuery(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
this.parentFilter = parentFilter; this.parentFilter = parentFilter;
this.childFilter = childFilter; this.childFilter = childFilter;
this.docId = hitContext.docId(); this.docId = hitContext.docId();
this.atomicReader = hitContext.readerContext().reader(); this.leafReader = hitContext.readerContext().reader();
} }
@Override @Override
@ -173,11 +177,11 @@ public final class InnerHitsContext {
if (super.equals(obj) == false) { if (super.equals(obj) == false) {
return false; return false;
} }
NestedChildrenFilter other = (NestedChildrenFilter) obj; NestedChildrenQuery other = (NestedChildrenQuery) obj;
return parentFilter.equals(other.parentFilter) return parentFilter.equals(other.parentFilter)
&& childFilter.equals(other.childFilter) && childFilter.equals(other.childFilter)
&& docId == other.docId && docId == other.docId
&& atomicReader.getCoreCacheKey() == other.atomicReader.getCoreCacheKey(); && leafReader.getCoreCacheKey() == other.leafReader.getCoreCacheKey();
} }
@Override @Override
@ -186,7 +190,7 @@ public final class InnerHitsContext {
hash = 31 * hash + parentFilter.hashCode(); hash = 31 * hash + parentFilter.hashCode();
hash = 31 * hash + childFilter.hashCode(); hash = 31 * hash + childFilter.hashCode();
hash = 31 * hash + docId; hash = 31 * hash + docId;
hash = 31 * hash + atomicReader.getCoreCacheKey().hashCode(); hash = 31 * hash + leafReader.getCoreCacheKey().hashCode();
return hash; return hash;
} }
@ -196,9 +200,12 @@ public final class InnerHitsContext {
} }
@Override @Override
public DocIdSet getDocIdSet(LeafReaderContext context, final Bits acceptDocs) throws IOException { public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ConstantScoreWeight(this) {
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
// Nested docs only reside in a single segment, so no need to evaluate all segments // Nested docs only reside in a single segment, so no need to evaluate all segments
if (!context.reader().getCoreCacheKey().equals(this.atomicReader.getCoreCacheKey())) { if (!context.reader().getCoreCacheKey().equals(leafReader.getCoreCacheKey())) {
return null; return null;
} }
@ -223,27 +230,18 @@ public final class InnerHitsContext {
if (childrenIterator == null) { if (childrenIterator == null) {
return null; return null;
} }
return new DocIdSet() { final DocIdSetIterator it = new DocIdSetIterator() {
@Override int doc = -1;
public long ramBytesUsed() {
return parents.ramBytesUsed() + children.ramBytesUsed();
}
@Override
public DocIdSetIterator iterator() throws IOException {
return new DocIdSetIterator() {
int currentDocId = -1;
@Override @Override
public int docID() { public int docID() {
return currentDocId; return doc;
} }
@Override @Override
public int nextDoc() throws IOException { public int nextDoc() throws IOException {
return advance(currentDocId + 1); return advance(doc + 1);
} }
@Override @Override
@ -251,23 +249,25 @@ public final class InnerHitsContext {
target = Math.max(firstChildDocId, target); target = Math.max(firstChildDocId, target);
if (target >= docId) { if (target >= docId) {
// We're outside the child nested scope, so it is done // We're outside the child nested scope, so it is done
return currentDocId = NO_MORE_DOCS; return doc = NO_MORE_DOCS;
} else { } else {
int advanced = childrenIterator.advance(target); int advanced = childrenIterator.advance(target);
if (advanced >= docId) { if (advanced >= docId) {
// We're outside the child nested scope, so it is done // We're outside the child nested scope, so it is done
return currentDocId = NO_MORE_DOCS; return doc = NO_MORE_DOCS;
} else { } else {
return currentDocId = advanced; return doc = advanced;
} }
} }
} }
@Override @Override
public long cost() { public long cost() {
return childrenIterator.cost(); return Math.min(childrenIterator.cost(), docId - firstChildDocId);
} }
}; };
return new ConstantScoreScorer(this, score(), it);
} }
}; };
} }

View File

@ -38,7 +38,7 @@ import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter;
import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenFilter; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenQuery;
import org.elasticsearch.test.ElasticsearchTestCase; import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test; import org.junit.Test;
@ -87,7 +87,7 @@ public class NestedChildrenFilterTest extends ElasticsearchTestCase {
for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) { for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) {
int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue(); int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue();
hitContext.reset(null, leaf, parentDoc, searcher); hitContext.reset(null, leaf, parentDoc, searcher);
NestedChildrenFilter nestedChildrenFilter = new NestedChildrenFilter(parentFilter, childFilter, hitContext); NestedChildrenQuery nestedChildrenFilter = new NestedChildrenQuery(parentFilter, childFilter, hitContext);
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
searcher.search(new ConstantScoreQuery(nestedChildrenFilter), totalHitCountCollector); searcher.search(new ConstantScoreQuery(nestedChildrenFilter), totalHitCountCollector);
assertThat(totalHitCountCollector.getTotalHits(), equalTo(expectedChildDocs)); assertThat(totalHitCountCollector.getTotalHits(), equalTo(expectedChildDocs));