Merge pull request #11224 from jpountz/enhancement/filtered_query_forbidden
Search: Make FilteredQuery a forbidden API.
This commit is contained in:
commit 52899c87ef
@@ -131,3 +131,7 @@ java.util.concurrent.Future#cancel(boolean)
 @defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead
 org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
 org.elasticsearch.common.io.PathUtils#get(java.net.URI)
+
+@defaultMessage Use queries, not filters
+org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter)
+org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter, org.apache.lucene.search.FilteredQuery$FilterStrategy)
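
With both FilteredQuery constructors on the forbidden list, call sites move to the Queries.filtered helper used by the hunks below. The helper's body is not part of this diff, so the following is only a hedged sketch of what such a helper can look like on this Lucene version (the class name FilteredQueries is hypothetical); it relies on the FILTER clause, which matches like MUST but contributes nothing to the score:

    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.ConstantScoreQuery;
    import org.apache.lucene.search.Query;

    public final class FilteredQueries {
        // Equivalent of new FilteredQuery(query, filter) without the forbidden API.
        public static Query filtered(Query query, Query filter) {
            if (query == null) {
                return new ConstantScoreQuery(filter); // filter-only: every hit gets the same score
            }
            if (filter == null) {
                return query;
            }
            BooleanQuery bq = new BooleanQuery();
            bq.add(query, Occur.MUST);     // contributes to the score
            bq.add(filter, Occur.FILTER);  // restricts matches, no score contribution
            return bq;
        }
    }
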
@@ -39,6 +39,8 @@ import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.SegmentCommitInfo;
 import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Filter;

@@ -52,11 +54,13 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TimeLimitingCollector;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Counter;
 import org.apache.lucene.util.Version;
@@ -795,4 +799,74 @@ public class Lucene {
             throw new UnsupportedOperationException("This IndexCommit does not support deletions");
         }
     }
+
+    /**
+     * Is it an empty {@link DocIdSet}?
+     */
+    public static boolean isEmpty(@Nullable DocIdSet set) {
+        return set == null || set == DocIdSet.EMPTY;
+    }
+
+    /**
+     * Given a {@link Scorer}, return a {@link Bits} instance that will match
+     * all documents contained in the set. Note that the returned {@link Bits}
+     * instance MUST be consumed in order.
+     */
+    public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException {
+        if (scorer == null) {
+            return new Bits.MatchNoBits(maxDoc);
+        }
+        final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
+        final DocIdSetIterator iterator;
+        if (twoPhase == null) {
+            iterator = scorer;
+        } else {
+            iterator = twoPhase.approximation();
+        }
+
+        return new Bits() {
+
+            int previous = -1;
+            boolean previousMatched = false;
+
+            @Override
+            public boolean get(int index) {
+                if (index < 0 || index >= maxDoc) {
+                    throw new IndexOutOfBoundsException(index + " is out of bounds: [" + 0 + "-" + maxDoc + "[");
+                }
+                if (index < previous) {
+                    throw new IllegalArgumentException("This Bits instance can only be consumed in order. "
+                            + "Got called on [" + index + "] while previously called on [" + previous + "]");
+                }
+                if (index == previous) {
+                    // we cache whether it matched because it is illegal to call
+                    // twoPhase.matches() twice
+                    return previousMatched;
+                }
+                previous = index;
+
+                int doc = iterator.docID();
+                if (doc < index) {
+                    try {
+                        doc = iterator.advance(index);
+                    } catch (IOException e) {
+                        throw new IllegalStateException("Cannot advance iterator", e);
+                    }
+                }
+                if (index == doc) {
+                    try {
+                        return previousMatched = twoPhase == null || twoPhase.matches();
+                    } catch (IOException e) {
+                        throw new IllegalStateException("Cannot validate match", e);
+                    }
+                }
+                return previousMatched = false;
+            }
+
+            @Override
+            public int length() {
+                return maxDoc;
+            }
+        };
+    }
 }
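
The in-order contract in the javadoc above is load-bearing: the returned Bits is backed by a forward-only iterator, and a two-phase match may only be confirmed once per document. A hedged usage sketch (filterWeight and leafContext are assumed to be in scope):

    // Doc IDs are visited in increasing order, which is exactly what the
    // returned Bits requires; going backwards throws IllegalArgumentException.
    Scorer filterScorer = filterWeight.scorer(leafContext, null);
    Bits bits = Lucene.asSequentialAccessBits(leafContext.reader().maxDoc(), filterScorer);
    for (int doc = 0; doc < leafContext.reader().maxDoc(); ++doc) {
        if (bits.get(doc)) {
            // handle the matching document
        }
    }
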
@@ -1,105 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.docset;
-
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.TwoPhaseIterator;
-import org.apache.lucene.util.Bits;
-import org.elasticsearch.common.Nullable;
-
-import java.io.IOException;
-
-/**
- */
-public class DocIdSets {
-
-    /**
-     * Is it an empty {@link DocIdSet}?
-     */
-    public static boolean isEmpty(@Nullable DocIdSet set) {
-        return set == null || set == DocIdSet.EMPTY;
-    }
-
-    /**
-     * Given a {@link Scorer}, return a {@link Bits} instance that will match
-     * all documents contained in the set. Note that the returned {@link Bits}
-     * instance MUST be consumed in order.
-     */
-    public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException {
-        if (scorer == null) {
-            return new Bits.MatchNoBits(maxDoc);
-        }
-        final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
-        final DocIdSetIterator iterator;
-        if (twoPhase == null) {
-            iterator = scorer;
-        } else {
-            iterator = twoPhase.approximation();
-        }
-
-        return new Bits() {
-
-            int previous = -1;
-            boolean previousMatched = false;
-
-            @Override
-            public boolean get(int index) {
-                if (index < 0 || index >= maxDoc) {
-                    throw new IndexOutOfBoundsException(index + " is out of bounds: [" + 0 + "-" + maxDoc + "[");
-                }
-                if (index < previous) {
-                    throw new IllegalArgumentException("This Bits instance can only be consumed in order. "
-                            + "Got called on [" + index + "] while previously called on [" + previous + "]");
-                }
-                if (index == previous) {
-                    // we cache whether it matched because it is illegal to call
-                    // twoPhase.matches() twice
-                    return previousMatched;
-                }
-                previous = index;
-
-                int doc = iterator.docID();
-                if (doc < index) {
-                    try {
-                        doc = iterator.advance(index);
-                    } catch (IOException e) {
-                        throw new IllegalStateException("Cannot advance iterator", e);
-                    }
-                }
-                if (index == doc) {
-                    try {
-                        return previousMatched = twoPhase == null || twoPhase.matches();
-                    } catch (IOException e) {
-                        throw new IllegalStateException("Cannot validate match", e);
-                    }
-                }
-                return previousMatched = false;
-            }
-
-            @Override
-            public int length() {
-                return maxDoc;
-            }
-        };
-    }
-
-}
@@ -19,9 +19,13 @@
 package org.elasticsearch.common.lucene.search;

 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.FilterLeafCollector;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;

 import java.io.IOException;

@@ -42,7 +46,7 @@ public class FilteredCollector implements Collector {
     public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
         final Scorer filterScorer = filter.scorer(context, null);
         final LeafCollector in = collector.getLeafCollector(context);
-        final Bits bits = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
+        final Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);

         return new FilterLeafCollector(in) {
             @Override
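
The hunk above stops at the @Override; the rest of the pattern is short enough to sketch. A hedged reconstruction of how such a wrapping collector typically continues (not necessarily the exact body in the file):

    import java.io.IOException;
    import org.apache.lucene.search.FilterLeafCollector;
    import org.apache.lucene.search.LeafCollector;
    import org.apache.lucene.util.Bits;

    // Delegate to the wrapped collector only for documents the filter accepts.
    // collect() sees ascending doc IDs, which keeps the sequential-access
    // contract of the Bits intact.
    static LeafCollector filtered(final LeafCollector in, final Bits bits) {
        return new FilterLeafCollector(in) {
            @Override
            public void collect(int doc) throws IOException {
                if (bits.get(doc)) {
                    super.collect(doc);
                }
            }
        };
    }
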
@@ -1,60 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.search;
-
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.Bits;
-
-import java.io.IOException;
-
-/**
- * A filter implementation that resolves details at the last possible moment between filter parsing and execution.
- * For example a date filter based on 'now'.
- */
-public abstract class ResolvableFilter extends Filter {
-
-    /**
-     * @return The actual filter instance to be executed containing the latest details.
-     */
-    public abstract Filter resolve();
-
-    @Override
-    public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
-        Filter resolvedFilter = resolve();
-        if (resolvedFilter != null) {
-            return resolvedFilter.getDocIdSet(context, acceptDocs);
-        } else {
-            return null;
-        }
-    }
-
-    @Override
-    public Query rewrite(IndexReader reader) throws IOException {
-        final Filter resolved = resolve();
-        if (resolved != null) {
-            return resolved;
-        }
-        return super.rewrite(reader);
-    }
-}
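
ResolvableFilter existed to defer details like date math on 'now' until execution; its rewrite method already shows the query-native way to get the same effect. A hedged sketch of a pure-Query analogue (the class name ResolvableQuery is hypothetical; this commit only deletes the Filter variant and does not add a direct replacement in this diff):

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.Query;

    public abstract class ResolvableQuery extends Query {

        /** @return the actual query to execute, built with the latest details */
        public abstract Query resolve();

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            Query resolved = resolve();
            if (resolved != null) {
                return resolved; // the searcher keeps rewriting until a fixed point
            }
            return super.rewrite(reader);
        }

        @Override
        public String toString(String field) {
            return "ResolvableQuery";
        }
    }
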
@@ -19,16 +19,24 @@

 package org.elasticsearch.common.lucene.search.function;

+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ToStringUtils;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;

 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;

 /**
  * A query that allows for a pluggable boost function / filter. If it matches

@@ -169,7 +177,7 @@ public class FiltersFunctionScoreQuery extends Query {
                 FilterFunction filterFunction = filterFunctions[i];
                 functions[i] = filterFunction.function.getLeafScoreFunction(context);
                 Scorer filterScorer = filterWeights[i].scorer(context, null); // no need to apply accepted docs
-                docSets[i] = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
+                docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
             }
             return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore);
         }

@@ -193,7 +201,7 @@ public class FiltersFunctionScoreQuery extends Query {
                     weightSum++;
                 }

-                Bits docSet = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(),
+                Bits docSet = Lucene.asSequentialAccessBits(context.reader().maxDoc(),
                         filterWeights[i].scorer(context, null));
                 if (docSet.get(doc)) {
                     Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
@@ -35,7 +35,7 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchGenerationException;

@@ -372,11 +372,11 @@ public class MapperService extends AbstractIndexComponent {
             BooleanQuery bq = new BooleanQuery();
             bq.add(percolatorType, Occur.MUST_NOT);
             bq.add(Queries.newNonNestedFilter(), Occur.MUST);
-            return new QueryWrapperFilter(bq);
+            return new ConstantScoreQuery(bq);
         } else if (hasNested) {
             return Queries.newNonNestedFilter();
         } else if (filterPercolateType) {
-            return new QueryWrapperFilter(Queries.not(percolatorType));
+            return new ConstantScoreQuery(Queries.not(percolatorType));
         } else {
             return null;
         }

@@ -390,7 +390,7 @@ public class MapperService extends AbstractIndexComponent {
             BooleanQuery bq = new BooleanQuery();
             bq.add(percolatorType, Occur.MUST_NOT);
             bq.add(filter, Occur.MUST);
-            return new QueryWrapperFilter(bq);
+            return new ConstantScoreQuery(bq);
         } else {
             return filter;
         }

@@ -420,9 +420,9 @@ public class MapperService extends AbstractIndexComponent {
                 BooleanQuery bq = new BooleanQuery();
                 bq.add(percolatorType, Occur.MUST_NOT);
                 bq.add(termsFilter, Occur.MUST);
-                return new QueryWrapperFilter(bq);
+                return new ConstantScoreQuery(bq);
             } else {
-                return new QueryWrapperFilter(termsFilter);
+                return termsFilter;
             }
         } else {
             // Current bool filter requires that at least one should clause matches, even with a must clause.

@@ -442,7 +442,7 @@ public class MapperService extends AbstractIndexComponent {
                 bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
             }

-            return new QueryWrapperFilter(bool);
+            return new ConstantScoreQuery(bool);
         }
     }

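
QueryWrapperFilter adapted a query into the Filter API; ConstantScoreQuery is the query-side replacement used throughout the hunks above. It matches the same documents but gives every hit the same constant score instead of a relevance score. A small illustrative fragment (the field and value are placeholders, not taken from this diff):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.ConstantScoreQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    // Before: Filter f = new QueryWrapperFilter(new TermQuery(new Term("type", "doc")));
    // After: same matching docs, constant score for each hit.
    Query filterLike = new ConstantScoreQuery(new TermQuery(new Term("type", "doc")));
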
@@ -23,8 +23,8 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Booleans;

@@ -37,8 +37,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MergeResult;
+import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.similarity.SimilarityProvider;

@@ -201,11 +201,11 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(new TermQuery(names().createIndexNameTerm(nullValue ? Values.TRUE : Values.FALSE)));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
@@ -24,10 +24,9 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -208,14 +207,11 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
-                nullValue.intValue(),
-                nullValue.intValue(),
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
@@ -24,6 +24,7 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Terms;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;

@@ -335,15 +336,11 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        long value = parseStringValue(nullValue);
-        return new QueryWrapperFilter(NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
-                value,
-                value,
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

@@ -28,10 +28,9 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -198,19 +197,16 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
                 includeLower, includeUpper);
     }

-    public Filter rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) {
-        return new QueryWrapperFilter(NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper));
+    public Query rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) {
+        return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper);
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep,
-                nullValue,
-                nullValue,
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
@@ -28,10 +28,9 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -209,14 +208,11 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
-                nullValue,
-                nullValue,
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
@@ -25,10 +25,9 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -203,14 +202,11 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
-                nullValue,
-                nullValue,
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
@@ -25,10 +25,9 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -193,14 +192,11 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
-                nullValue,
-                nullValue,
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
@@ -25,10 +25,9 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -209,14 +208,11 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        return new QueryWrapperFilter(NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
-                nullValue.intValue(),
-                nullValue.intValue(),
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
    }

     @Override
@@ -25,10 +25,9 @@ import org.apache.lucene.analysis.NumericTokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;

@@ -250,15 +249,11 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
     }

     @Override
-    public Filter nullValueFilter() {
+    public Query nullValueFilter() {
         if (nullValue == null) {
             return null;
         }
-        final long value = ipToLong(nullValue);
-        return new QueryWrapperFilter(NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
-                value,
-                value,
-                true, true));
+        return new ConstantScoreQuery(termQuery(nullValue, null));
     }

     @Override
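
Every mapper above had its own copy of the null-value logic, each building a type-specific NumericRangeQuery wrapped in a QueryWrapperFilter; after this commit they all reduce to the same shape, reusing the mapper's own termQuery, which already knows how to encode the configured null_value. The shared pattern, as it appears in each of the hunks above:

    @Override
    public Query nullValueFilter() {
        if (nullValue == null) {
            return null; // no null_value configured, nothing to match
        }
        // termQuery() encodes nullValue the same way indexing did, so a
        // term lookup replaces the old single-point numeric range.
        return new ConstantScoreQuery(termQuery(nullValue, null));
    }
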
@@ -187,7 +187,7 @@ public class HasParentQueryParser implements QueryParser {
                     parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
                 }
             }
-            parentFilter = new QueryWrapperFilter(parentsFilter);
+            parentFilter = parentsFilter;
         }

         if (parentFilter == null) {
@@ -22,9 +22,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.TermRangeQuery;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;

@@ -112,7 +110,7 @@ public class MissingQueryParser implements QueryParser {
             return null;
         }

-        Filter existenceFilter = null;
+        Query existenceFilter = null;
         Query nullFilter = null;

         if (existence) {

@@ -139,8 +137,8 @@ public class MissingQueryParser implements QueryParser {
                 boolFilter.add(filter, BooleanClause.Occur.SHOULD);
             }

-            existenceFilter = new QueryWrapperFilter(boolFilter);
-            existenceFilter = new QueryWrapperFilter(Queries.not(existenceFilter));;
+            existenceFilter = boolFilter;
+            existenceFilter = Queries.not(existenceFilter);;
         }

         if (nullValue) {
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;

 import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.FilteredQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.search.join.ToParentBlockJoinQuery;

@@ -29,6 +28,7 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
 import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;

@@ -155,7 +155,7 @@ public class NestedQueryParser implements QueryParser {
         }

         if (innerQuery != null) {
-            return new ToParentBlockJoinQuery(new FilteredQuery(innerQuery, childFilter), parentFilter, scoreMode);
+            return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode);
         } else {
             return null;
         }
@@ -39,7 +39,7 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lucene.IndexCacheableQuery;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
 import org.elasticsearch.index.fielddata.IndexParentChildFieldData;

@@ -208,7 +208,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {

         if (shortCircuitFilter != null) {
             DocIdSet docIdSet = shortCircuitFilter.getDocIdSet(context, acceptDocs);
-            if (!DocIdSets.isEmpty(docIdSet)) {
+            if (!Lucene.isEmpty(docIdSet)) {
                 DocIdSetIterator iterator = docIdSet.iterator();
                 if (iterator != null) {
                     return ConstantScorer.create(iterator, this, queryWeight);

@@ -218,7 +218,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
         }

         DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
-        if (!DocIdSets.isEmpty(parentDocIdSet)) {
+        if (!Lucene.isEmpty(parentDocIdSet)) {
             // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
             // count down (short circuit) logic will then work as expected.
             parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());
@@ -37,11 +37,10 @@ import org.apache.lucene.search.XFilteredDocIdSetIterator;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ToStringUtils;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.IndexCacheableQuery;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.FloatArray;

@@ -268,7 +267,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
         @Override
         public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
             DocIdSet parentsSet = parentFilter.getDocIdSet(context, acceptDocs);
-            if (DocIdSets.isEmpty(parentsSet) || remaining == 0) {
+            if (Lucene.isEmpty(parentsSet) || remaining == 0) {
                 return null;
             }

@@ -35,7 +35,7 @@ import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lucene.IndexCacheableQuery;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
 import org.elasticsearch.index.fielddata.IndexParentChildFieldData;

@@ -176,7 +176,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
         @Override
         public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
             DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
-            if (DocIdSets.isEmpty(childrenDocIdSet)) {
+            if (Lucene.isEmpty(childrenDocIdSet)) {
                 return null;
             }

@@ -39,7 +39,7 @@ import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.IndexCacheableQuery;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.FloatArray;

@@ -248,7 +248,7 @@ public class ParentQuery extends IndexCacheableQuery {
         @Override
         public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
             DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, acceptDocs);
-            if (DocIdSets.isEmpty(childrenDocSet)) {
+            if (Lucene.isEmpty(childrenDocSet)) {
                 return null;
             }
             final DocIdSetIterator childIterator = childrenDocSet.iterator();
@@ -19,6 +19,7 @@

 package org.elasticsearch.index.search.geo;

+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ConstantScoreScorer;
 import org.apache.lucene.search.ConstantScoreWeight;

@@ -125,11 +126,16 @@ public class GeoDistanceRangeQuery extends Query {
         return indexFieldData.getFieldNames().indexName();
     }

+    @Override
+    public Query rewrite(IndexReader reader) throws IOException {
+        return super.rewrite(reader);
+    }
+
     @Override
     public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
         final Weight boundingBoxWeight;
         if (boundingBoxFilter != null) {
-            boundingBoxWeight = boundingBoxFilter.createWeight(searcher, false);
+            boundingBoxWeight = searcher.createNormalizedWeight(boundingBoxFilter, false);
         } else {
             boundingBoxWeight = null;
         }
@@ -26,7 +26,7 @@ import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.util.LongObjectPagedHashMap;
 import org.elasticsearch.index.search.child.ConstantScorer;

@@ -110,7 +110,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
         final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
         assert globalOrdinals != null;
         Scorer parentScorer = parentFilter.scorer(ctx, null);
-        final Bits parentDocs = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer);
+        final Bits parentDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer);
         if (childFilter.scorer(ctx, null) != null) {
             replay.add(ctx);
         }
@@ -22,7 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;

@@ -58,7 +58,7 @@ public class FilterAggregator extends SingleBucketAggregator {
     public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
             final LeafBucketCollector sub) throws IOException {
         // no need to provide deleted docs to the filter
-        final Bits bits = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorer(ctx, null));
+        final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorer(ctx, null));
         return new LeafBucketCollectorBase(sub, null) {
             @Override
             public void collect(int doc, long bucket) throws IOException {
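
The hunk cuts off at the collect signature. A hedged sketch of how such a leaf collector typically completes the pattern (collectBucket is the BucketsAggregator-side helper; treat the exact body as an assumption, not a quote of the file):

    return new LeafBucketCollectorBase(sub, null) {
        @Override
        public void collect(int doc, long bucket) throws IOException {
            // Ascending doc IDs keep the sequential-access Bits contract.
            if (bits.get(doc)) {
                collectBucket(sub, doc, bucket); // count doc into this filter's bucket
            }
        }
    };
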
@@ -25,8 +25,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.search.aggregations.AggregationExecutionException;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;

@@ -82,7 +81,7 @@ public class FiltersAggregator extends BucketsAggregator {
         // no need to provide deleted docs to the filter
         final Bits[] bits = new Bits[filters.length];
         for (int i = 0; i < filters.length; ++i) {
-            bits[i] = DocIdSets.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx, null));
+            bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx, null));
         }
         return new LeafBucketCollectorBase(sub, null) {
             @Override
@@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;

@@ -67,7 +67,7 @@ public class NestedAggregator extends SingleBucketAggregator {
             this.parentFilter = null;
             // In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also null here.
             DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null);
-            if (DocIdSets.isEmpty(childDocIdSet)) {
+            if (Lucene.isEmpty(childDocIdSet)) {
                 childDocs = null;
             } else {
                 childDocs = childDocIdSet.iterator();

@@ -97,7 +97,7 @@ public class NestedAggregator extends SingleBucketAggregator {
         }
         parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
         BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);
-        if (DocIdSets.isEmpty(parentSet)) {
+        if (Lucene.isEmpty(parentSet)) {
             // There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
             childDocs = null;
             return;
@@ -26,7 +26,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;

@@ -72,7 +72,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
         // must belong to parent docs that is alive. For this reason acceptedDocs can be null here.
         BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx);
         final BitSet parentDocs;
-        if (DocIdSets.isEmpty(docIdSet)) {
+        if (Lucene.isEmpty(docIdSet)) {
             return LeafBucketCollector.NO_OP_COLLECTOR;
         } else {
             parentDocs = docIdSet.bits();
@@ -26,16 +26,20 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilteredQuery;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.Bits;

@@ -132,7 +136,7 @@ public final class InnerHitsContext {
             }
             BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter);
             Filter childFilter = childObjectMapper.nestedTypeFilter();
-            Query q = new FilteredQuery(query, new NestedChildrenFilter(parentFilter, childFilter, hitContext));
+            Query q = Queries.filtered(query, new NestedChildrenQuery(parentFilter, childFilter, hitContext));

             if (size() == 0) {
                 return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0);

@@ -154,18 +158,18 @@ public final class InnerHitsContext {
         }

         // A filter that only emits the nested children docs of a specific nested parent doc
-        static class NestedChildrenFilter extends Filter {
+        static class NestedChildrenQuery extends Query {

             private final BitDocIdSetFilter parentFilter;
             private final Filter childFilter;
             private final int docId;
-            private final LeafReader atomicReader;
+            private final LeafReader leafReader;

-            NestedChildrenFilter(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
+            NestedChildrenQuery(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
                 this.parentFilter = parentFilter;
                 this.childFilter = childFilter;
                 this.docId = hitContext.docId();
-                this.atomicReader = hitContext.readerContext().reader();
+                this.leafReader = hitContext.readerContext().reader();
             }

             @Override

@@ -173,11 +177,11 @@ public final class InnerHitsContext {
                 if (super.equals(obj) == false) {
                     return false;
                 }
-                NestedChildrenFilter other = (NestedChildrenFilter) obj;
+                NestedChildrenQuery other = (NestedChildrenQuery) obj;
                 return parentFilter.equals(other.parentFilter)
                         && childFilter.equals(other.childFilter)
                         && docId == other.docId
-                        && atomicReader.getCoreCacheKey() == other.atomicReader.getCoreCacheKey();
+                        && leafReader.getCoreCacheKey() == other.leafReader.getCoreCacheKey();
             }

             @Override

@@ -186,7 +190,7 @@ public final class InnerHitsContext {
                 hash = 31 * hash + parentFilter.hashCode();
                 hash = 31 * hash + childFilter.hashCode();
                 hash = 31 * hash + docId;
-                hash = 31 * hash + atomicReader.getCoreCacheKey().hashCode();
+                hash = 31 * hash + leafReader.getCoreCacheKey().hashCode();
                 return hash;
             }

@@ -196,9 +200,12 @@ public final class InnerHitsContext {
             }

             @Override
-            public DocIdSet getDocIdSet(LeafReaderContext context, final Bits acceptDocs) throws IOException {
+            public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+                return new ConstantScoreWeight(this) {
+                    @Override
+                    public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
                         // Nested docs only reside in a single segment, so no need to evaluate all segments
-                        if (!context.reader().getCoreCacheKey().equals(this.atomicReader.getCoreCacheKey())) {
+                        if (!context.reader().getCoreCacheKey().equals(leafReader.getCoreCacheKey())) {
                             return null;
                         }

@@ -223,27 +230,18 @@ public final class InnerHitsContext {
                         if (childrenIterator == null) {
                             return null;
                         }
-                        return new DocIdSet() {
-
-                            @Override
-                            public long ramBytesUsed() {
-                                return parents.ramBytesUsed() + children.ramBytesUsed();
-                            }
-
-                            @Override
-                            public DocIdSetIterator iterator() throws IOException {
-                                return new DocIdSetIterator() {
-
-                                    int currentDocId = -1;
+                        final DocIdSetIterator it = new DocIdSetIterator() {
+
+                            int doc = -1;

                             @Override
                             public int docID() {
-                                return currentDocId;
+                                return doc;
                             }

                             @Override
                             public int nextDoc() throws IOException {
-                                return advance(currentDocId + 1);
+                                return advance(doc + 1);
                             }

                             @Override

@@ -251,23 +249,25 @@ public final class InnerHitsContext {
                                 target = Math.max(firstChildDocId, target);
                                 if (target >= docId) {
                                     // We're outside the child nested scope, so it is done
-                                    return currentDocId = NO_MORE_DOCS;
+                                    return doc = NO_MORE_DOCS;
                                 } else {
                                     int advanced = childrenIterator.advance(target);
                                     if (advanced >= docId) {
                                         // We're outside the child nested scope, so it is done
-                                        return currentDocId = NO_MORE_DOCS;
+                                        return doc = NO_MORE_DOCS;
                                     } else {
-                                        return currentDocId = advanced;
+                                        return doc = advanced;
                                     }
                                 }
                             }

                             @Override
                             public long cost() {
-                                return childrenIterator.cost();
+                                return Math.min(childrenIterator.cost(), docId - firstChildDocId);
                             }

                         };
+                        return new ConstantScoreScorer(this, score(), it);
+                    }
+                };
             }
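
The NestedChildrenFilter-to-NestedChildrenQuery conversion above is the general recipe this codebase uses for porting a Filter to the Query API: move getDocIdSet's per-segment logic into a Weight whose scorer wraps the iterator at constant score. A hedged, generic sketch of that recipe (MatchingDocsQuery and matchingDocs are hypothetical names, not part of this diff):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;
    import org.apache.lucene.util.Bits;

    public abstract class MatchingDocsQuery extends Query {

        /** Per-segment matches; null means no matches in this segment. */
        protected abstract DocIdSetIterator matchingDocs(LeafReaderContext context) throws IOException;

        @Override
        public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
            return new ConstantScoreWeight(this) {
                @Override
                public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
                    DocIdSetIterator it = matchingDocs(context);
                    // Wrap the iterator at constant score, as the diff above does.
                    return it == null ? null : new ConstantScoreScorer(this, score(), it);
                }
            };
        }

        @Override
        public String toString(String field) {
            return "MatchingDocsQuery";
        }
    }
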
@@ -38,7 +38,7 @@ import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenFilter;
+import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenQuery;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;

@@ -87,7 +87,7 @@ public class NestedChildrenFilterTest extends ElasticsearchTestCase {
         for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) {
             int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue();
             hitContext.reset(null, leaf, parentDoc, searcher);
-            NestedChildrenFilter nestedChildrenFilter = new NestedChildrenFilter(parentFilter, childFilter, hitContext);
+            NestedChildrenQuery nestedChildrenFilter = new NestedChildrenQuery(parentFilter, childFilter, hitContext);
             TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
             searcher.search(new ConstantScoreQuery(nestedChildrenFilter), totalHitCountCollector);
             assertThat(totalHitCountCollector.getTotalHits(), equalTo(expectedChildDocs));