Upgrade to lucene-5.2-snapshot-1675363.

This snapshot contains in particular LUCENE-6446 (refactored explanation API)
and LUCENE-6448 (better equals/hashcode for filters).
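
At a glance, the two API changes look like this. This is a sketch only; ExampleFilter and its "field" member are illustrative, not part of this commit:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;

// LUCENE-6446: Explanation is now immutable. The match flag, value, description
// and sub-explanations are all supplied at construction through static factories,
// replacing ComplexExplanation and the setValue/setMatch/addDetail setters.
class ExplanationSketch {
    static Explanation explain(float boost, boolean matched) {
        Explanation detail = Explanation.match(boost, "boostFactor");
        return matched
                ? Explanation.match(boost, "static boost factor", detail)
                : Explanation.noMatch("no match", detail);
    }
}

// LUCENE-6448: Query (and therefore Filter) defines a class- and boost-aware
// equals()/hashCode(), so filter subclasses delegate to super and then fold in
// their own fields instead of hand-rolling instanceof checks and magic numbers.
final class ExampleFilter extends Filter {
    private final String field;

    ExampleFilter(String field) {
        this.field = field;
    }

    @Override
    public boolean equals(Object obj) {
        if (super.equals(obj) == false) {
            return false;
        }
        return field.equals(((ExampleFilter) obj).field);
    }

    @Override
    public int hashCode() {
        return 31 * super.hashCode() + field.hashCode();
    }

    @Override
    public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
        return null; // by Filter's contract, null means "no documents match"
    }

    @Override
    public String toString(String field) {
        return "ExampleFilter(" + this.field + ")";
    }
}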
Adrien Grand 2015-04-22 16:10:27 +02:00
parent 05c3d05cff
commit dd679a3a38
57 changed files with 348 additions and 390 deletions

View File

@ -32,7 +32,7 @@
<properties>
<lucene.version>5.2.0</lucene.version>
-<lucene.snapshot.revision>1675100</lucene.snapshot.revision>
+<lucene.snapshot.revision>1675363</lucene.snapshot.revision>
<lucene.maven.version>5.2.0-snapshot-${lucene.snapshot.revision}</lucene.maven.version>
<testframework.version>2.1.14</testframework.version>
<tests.jvms>auto</tests.jvms>

View File

@ -39,7 +39,6 @@ import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
@ -530,48 +529,29 @@ public class Lucene {
}
public static Explanation readExplanation(StreamInput in) throws IOException {
-Explanation explanation;
-if (in.readBoolean()) {
-Boolean match = in.readOptionalBoolean();
-explanation = new ComplexExplanation();
-((ComplexExplanation) explanation).setMatch(match);
+boolean match = in.readBoolean();
+String description = in.readString();
+final Explanation[] subExplanations = new Explanation[in.readVInt()];
+for (int i = 0; i < subExplanations.length; ++i) {
+subExplanations[i] = readExplanation(in);
+}
+if (match) {
+return Explanation.match(in.readFloat(), description, subExplanations);
} else {
-explanation = new Explanation();
+return Explanation.noMatch(description, subExplanations);
}
-explanation.setValue(in.readFloat());
-explanation.setDescription(in.readString());
-if (in.readBoolean()) {
-int size = in.readVInt();
-for (int i = 0; i < size; i++) {
-explanation.addDetail(readExplanation(in));
-}
-}
-return explanation;
}
public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException {
-if (explanation instanceof ComplexExplanation) {
-out.writeBoolean(true);
-out.writeOptionalBoolean(((ComplexExplanation) explanation).getMatch());
-} else {
-out.writeBoolean(false);
-}
-out.writeFloat(explanation.getValue());
-if (explanation.getDescription() == null) {
-throw new ElasticsearchIllegalArgumentException("Explanation descriptions should NOT be null\n[" + explanation.toString() + "]");
-}
+out.writeBoolean(explanation.isMatch());
out.writeString(explanation.getDescription());
Explanation[] subExplanations = explanation.getDetails();
-if (subExplanations == null) {
-out.writeBoolean(false);
-} else {
-out.writeBoolean(true);
-out.writeVInt(subExplanations.length);
-for (Explanation subExp : subExplanations) {
-writeExplanation(out, subExp);
-}
+out.writeVInt(subExplanations.length);
+for (Explanation subExp : subExplanations) {
+writeExplanation(out, subExp);
+}
+if (explanation.isMatch()) {
+out.writeFloat(explanation.getValue());
+}
}
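
For reference, a round-trip sketch of the serialization above, assuming Elasticsearch's BytesStreamOutput and BytesReference.streamInput() from the same codebase; it shows why the score is only written for matches:

import java.io.IOException;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;

class ExplanationStreamingSketch {
    static Explanation roundTrip(Explanation original) throws IOException {
        // Write order mirrors read order: match flag, description, child count,
        // children, then the score. The score is only present for matches,
        // because a no-match Explanation carries no value to serialize.
        BytesStreamOutput out = new BytesStreamOutput();
        Lucene.writeExplanation(out, original);
        return Lucene.readExplanation(out.bytes().streamInput());
    }
}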

View File

@ -19,17 +19,20 @@
package org.elasticsearch.common.lucene.search;
+import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
@ -58,6 +61,14 @@ public class Queries {
return wrap(newMatchNoDocsQuery());
}
+public static Filter newNestedFilter() {
+return wrap(new PrefixQuery(new Term(TypeFieldMapper.NAME, new BytesRef("__"))));
+}
+public static Filter newNonNestedFilter() {
+return wrap(not(newNestedFilter()));
+}
/** Return a query that matches all documents but those that match the given query. */
public static Query not(Query q) {
BooleanQuery bq = new BooleanQuery();

View File

@ -37,8 +37,6 @@ public abstract class ResolvableFilter extends Filter {
*/
public abstract Filter resolve();
@Override
public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
Filter resolvedFilter = resolve();

View File

@ -52,9 +52,7 @@ public class BoostScoreFunction extends ScoreFunction {
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
-Explanation exp = new Explanation(boost, "static boost factor");
-exp.addDetail(new Explanation(boost, "boostFactor"));
-return exp;
+return Explanation.match(boost, "static boost factor", Explanation.match(boost, "boostFactor"));
}
};
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.common.lucene.search.function;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
public enum CombineFunction {
@ -35,16 +34,15 @@ public enum CombineFunction {
}
@Override
-public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
+public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost) * queryExpl.getValue();
-ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:");
-res.addDetail(queryExpl);
-ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of");
-minExpl.addDetail(funcExpl);
-minExpl.addDetail(new Explanation(maxBoost, "maxBoost"));
-res.addDetail(minExpl);
-res.addDetail(new Explanation(queryBoost, "queryBoost"));
-return res;
+Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
+Explanation minExpl = Explanation.match(
+Math.min(funcExpl.getValue(), maxBoost),
+"min of:",
+funcExpl, boostExpl);
+return Explanation.match(score, "function score, product of:",
+queryExpl, minExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
REPLACE {
@ -59,15 +57,15 @@ public enum CombineFunction {
}
@Override
-public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
+public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost);
-ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:");
-ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of");
-minExpl.addDetail(funcExpl);
-minExpl.addDetail(new Explanation(maxBoost, "maxBoost"));
-res.addDetail(minExpl);
-res.addDetail(new Explanation(queryBoost, "queryBoost"));
-return res;
+Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
+Explanation minExpl = Explanation.match(
+Math.min(funcExpl.getValue(), maxBoost),
+"min of:",
+funcExpl, boostExpl);
+return Explanation.match(score, "function score, product of:",
+minExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
@ -83,19 +81,14 @@ public enum CombineFunction {
}
@Override
-public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
+public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = queryBoost * (Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue());
-ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:");
-ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of");
-minExpl.addDetail(funcExpl);
-minExpl.addDetail(new Explanation(maxBoost, "maxBoost"));
-ComplexExplanation sumExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(),
-"sum of");
-sumExpl.addDetail(queryExpl);
-sumExpl.addDetail(minExpl);
-res.addDetail(sumExpl);
-res.addDetail(new Explanation(queryBoost, "queryBoost"));
-return res;
+Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
+funcExpl, Explanation.match(maxBoost, "maxBoost"));
+Explanation sumExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of",
+queryExpl, minExpl);
+return Explanation.match(score, "function score, product of:",
+sumExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
@ -111,19 +104,15 @@ public enum CombineFunction {
}
@Override
-public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
+public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = toFloat(queryBoost * (queryExpl.getValue() + Math.min(funcExpl.getValue(), maxBoost)) / 2.0);
-ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:");
-ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of");
-minExpl.addDetail(funcExpl);
-minExpl.addDetail(new Explanation(maxBoost, "maxBoost"));
-ComplexExplanation avgExpl = new ComplexExplanation(true,
-toFloat((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of");
-avgExpl.addDetail(queryExpl);
-avgExpl.addDetail(minExpl);
-res.addDetail(avgExpl);
-res.addDetail(new Explanation(queryBoost, "queryBoost"));
-return res;
+Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
+funcExpl, Explanation.match(maxBoost, "maxBoost"));
+Explanation avgExpl = Explanation.match(
+toFloat((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of",
+queryExpl, minExpl);
+return Explanation.match(score, "function score, product of:",
+avgExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
@ -139,19 +128,16 @@ public enum CombineFunction {
}
@Override
-public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
+public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = toFloat(queryBoost * Math.min(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost)));
-ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:");
-ComplexExplanation innerMinExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of");
-innerMinExpl.addDetail(funcExpl);
-innerMinExpl.addDetail(new Explanation(maxBoost, "maxBoost"));
-ComplexExplanation outerMinExpl = new ComplexExplanation(true, Math.min(Math.min(funcExpl.getValue(), maxBoost),
-queryExpl.getValue()), "min of");
-outerMinExpl.addDetail(queryExpl);
-outerMinExpl.addDetail(innerMinExpl);
-res.addDetail(outerMinExpl);
-res.addDetail(new Explanation(queryBoost, "queryBoost"));
-return res;
+Explanation innerMinExpl = Explanation.match(
+Math.min(funcExpl.getValue(), maxBoost), "min of:",
+funcExpl, Explanation.match(maxBoost, "maxBoost"));
+Explanation outerMinExpl = Explanation.match(
+Math.min(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "min of",
+queryExpl, innerMinExpl);
+return Explanation.match(score, "function score, product of:",
+outerMinExpl, Explanation.match(queryBoost, "queryBoost"));
}
},
@ -167,19 +153,16 @@ public enum CombineFunction {
}
@Override
-public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
+public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
float score = toFloat(queryBoost * Math.max(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost)));
-ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:");
-ComplexExplanation innerMinExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of");
-innerMinExpl.addDetail(funcExpl);
-innerMinExpl.addDetail(new Explanation(maxBoost, "maxBoost"));
-ComplexExplanation outerMaxExpl = new ComplexExplanation(true, Math.max(Math.min(funcExpl.getValue(), maxBoost),
-queryExpl.getValue()), "max of");
-outerMaxExpl.addDetail(queryExpl);
-outerMaxExpl.addDetail(innerMinExpl);
-res.addDetail(outerMaxExpl);
-res.addDetail(new Explanation(queryBoost, "queryBoost"));
-return res;
+Explanation innerMinExpl = Explanation.match(
+Math.min(funcExpl.getValue(), maxBoost), "min of:",
+funcExpl, Explanation.match(maxBoost, "maxBoost"));
+Explanation outerMaxExpl = Explanation.match(
+Math.max(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "max of:",
+queryExpl, innerMinExpl);
+return Explanation.match(score, "function score, product of:",
+outerMaxExpl, Explanation.match(queryBoost, "queryBoost"));
}
};
@ -198,5 +181,5 @@ public enum CombineFunction {
return Double.compare(floatVersion, input) == 0 || input == 0.0d ? 0 : 1.d - (floatVersion) / input;
}
-public abstract ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost);
+public abstract Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost);
}
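
To visualize what these explain() methods now return, here is a small sketch with made-up numbers (queryExpl 2.0, funcExpl 3.0 capped by maxBoost 2.5, queryBoost 1.0); Explanation.toString() renders the tree with two-space indentation:

import org.apache.lucene.search.Explanation;

class CombineExplainSketch {
    public static void main(String[] args) {
        // Shape of the product case above: 1.0 * min(3.0, 2.5) * 2.0 = 5.0.
        Explanation queryExpl = Explanation.match(2.0f, "weight(field:term)");
        Explanation minExpl = Explanation.match(2.5f, "min of:",
                Explanation.match(3.0f, "funcValue"), Explanation.match(2.5f, "maxBoost"));
        Explanation res = Explanation.match(5.0f, "function score, product of:",
                queryExpl, minExpl, Explanation.match(1.0f, "queryBoost"));
        System.out.println(res);
        // Expected rendering:
        // 5.0 = function score, product of:
        //   2.0 = weight(field:term)
        //   2.5 = min of:
        //     3.0 = funcValue
        //     2.5 = maxBoost
        //   1.0 = queryBoost
    }
}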

View File

@ -70,13 +70,11 @@ public class FieldValueFactorFunction extends ScoreFunction {
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
-Explanation exp = new Explanation();
String modifierStr = modifier != null ? modifier.toString() : "";
double score = score(docId, subQueryScore.getValue());
-exp.setValue(CombineFunction.toFloat(score));
-exp.setDescription("field value function: " +
-modifierStr + "(" + "doc['" + field + "'].value * factor=" + boostFactor + ")");
-return exp;
+return Explanation.match(
+CombineFunction.toFloat(score),
+"field value function: " + modifierStr + "(" + "doc['" + field + "'].value * factor=" + boostFactor + ")");
}
};
}

View File

@ -175,7 +175,7 @@ public class FiltersFunctionScoreQuery extends Query {
return subQueryExpl;
}
// First: Gather explanations for all filters
-List<ComplexExplanation> filterExplanations = new ArrayList<>();
+List<Explanation> filterExplanations = new ArrayList<>();
float weightSum = 0;
for (FilterFunction filterFunction : filterFunctions) {
@ -191,18 +191,16 @@ public class FiltersFunctionScoreQuery extends Query {
Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
double factor = functionExplanation.getValue();
float sc = CombineFunction.toFloat(factor);
-ComplexExplanation filterExplanation = new ComplexExplanation(true, sc, "function score, product of:");
-filterExplanation.addDetail(new Explanation(1.0f, "match filter: " + filterFunction.filter.toString()));
-filterExplanation.addDetail(functionExplanation);
+Explanation filterExplanation = Explanation.match(sc, "function score, product of:",
+Explanation.match(1.0f, "match filter: " + filterFunction.filter.toString()), functionExplanation);
filterExplanations.add(filterExplanation);
}
}
if (filterExplanations.size() == 0) {
float sc = getBoost() * subQueryExpl.getValue();
-Explanation res = new ComplexExplanation(true, sc, "function score, no filter match, product of:");
-res.addDetail(subQueryExpl);
-res.addDetail(new Explanation(getBoost(), "queryBoost"));
-return res;
+return Explanation.match(sc, "function score, no filter match, product of:",
+subQueryExpl,
+Explanation.match(getBoost(), "queryBoost"));
}
// Second: Compute the factor that would have been computed by the
@ -242,12 +240,11 @@ public class FiltersFunctionScoreQuery extends Query {
}
}
}
-ComplexExplanation factorExplanaition = new ComplexExplanation(true, CombineFunction.toFloat(factor),
-"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]");
-for (int i = 0; i < filterExplanations.size(); i++) {
-factorExplanaition.addDetail(filterExplanations.get(i));
-}
-return combineFunction.explain(getBoost(), subQueryExpl, factorExplanaition, maxBoost);
+Explanation factorExplanation = Explanation.match(
+CombineFunction.toFloat(factor),
+"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
+filterExplanations);
+return combineFunction.explain(getBoost(), subQueryExpl, factorExplanation, maxBoost);
}
}

View File

@ -74,9 +74,9 @@ public class RandomScoreFunction extends ScoreFunction {
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
-Explanation exp = new Explanation();
-exp.setDescription("random score function (seed: " + originalSeed + ")");
-return exp;
+return Explanation.match(
+CombineFunction.toFloat(score(docId, subQueryScore.getValue())),
+"random score function (seed: " + originalSeed + ")");
}
};
}

View File

@ -117,10 +117,12 @@ public class ScriptScoreFunction extends ScoreFunction {
if (params != null) {
explanation += "\" and parameters: \n" + params.toString();
}
-exp = new Explanation(CombineFunction.toFloat(score), explanation);
-Explanation scoreExp = new Explanation(subQueryScore.getValue(), "_score: ");
-scoreExp.addDetail(subQueryScore);
-exp.addDetail(scoreExp);
+Explanation scoreExp = Explanation.match(
+subQueryScore.getValue(), "_score: ",
+subQueryScore);
+return Explanation.match(
+CombineFunction.toFloat(score), explanation,
+scoreExp);
}
return exp;
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
@ -65,18 +64,16 @@ public class WeightFactorFunction extends ScoreFunction {
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException {
-Explanation functionScoreExplanation;
Explanation functionExplanation = leafFunction.explainScore(docId, subQueryScore);
-functionScoreExplanation = new ComplexExplanation(true, functionExplanation.getValue() * (float) getWeight(), "product of:");
-functionScoreExplanation.addDetail(functionExplanation);
-functionScoreExplanation.addDetail(explainWeight());
-return functionScoreExplanation;
+return Explanation.match(
+functionExplanation.getValue() * (float) getWeight(), "product of:",
+functionExplanation, explainWeight());
}
};
}
public Explanation explainWeight() {
-return new Explanation(getWeight(), "weight");
+return Explanation.match(getWeight(), "weight");
}
public float getWeight() {
@ -99,7 +96,7 @@ public class WeightFactorFunction extends ScoreFunction {
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
-return new Explanation(1.0f, "constant score 1.0 - no function provided");
+return Explanation.match(1.0f, "constant score 1.0 - no function provided");
}
};
}

View File

@ -23,6 +23,7 @@ import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
@ -36,19 +37,19 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.NoCacheFilter;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
-import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.settings.IndexSettings;
+import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
-import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;
import org.elasticsearch.threadpool.ThreadPool;
@ -266,7 +267,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
}
if (hasNested) {
-warmUp.add(NonNestedDocsFilter.INSTANCE);
+warmUp.add(Queries.newNonNestedFilter());
}
final Executor executor = threadPool.executor(executor());

View File

@ -212,13 +212,13 @@ public class WeightedFilterCache extends AbstractIndexComponent implements Filte
@Override
public boolean equals(Object o) {
-if (!(o instanceof FilterCacheFilterWrapper)) return false;
+if (super.equals(o) == false) return false;
return this.filter.equals(((FilterCacheFilterWrapper) o).filter);
}
@Override
public int hashCode() {
-return filter.hashCode() ^ 0x1117BF25;
+return 31 * super.hashCode() + filter.hashCode();
}
}

View File

@ -36,7 +36,6 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException;
@ -61,7 +60,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.indices.InvalidTypeNameException;
@ -72,7 +70,6 @@ import org.elasticsearch.script.ScriptService;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
-import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@ -455,10 +452,10 @@ public class MapperService extends AbstractIndexComponent {
if (hasNested && filterPercolateType) {
BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT);
-bq.add(NonNestedDocsFilter.INSTANCE, Occur.MUST);
+bq.add(Queries.newNonNestedFilter(), Occur.MUST);
return Queries.wrap(bq);
} else if (hasNested) {
-return NonNestedDocsFilter.INSTANCE;
+return Queries.newNonNestedFilter();
} else if (filterPercolateType) {
return Queries.wrap(Queries.not(percolatorType));
} else {
@ -523,7 +520,7 @@ public class MapperService extends AbstractIndexComponent {
bool.add(percolatorType, BooleanClause.Occur.MUST_NOT);
}
if (hasNested) {
-bool.add(NonNestedDocsFilter.INSTANCE, BooleanClause.Occur.MUST);
+bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
}
return Queries.wrap(bool);

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -35,7 +36,6 @@ import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.ScoreType;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SubSearchContext;
@ -166,7 +166,7 @@ public class HasChildFilterParser implements FilterParser {
BitDocIdSetFilter nonNestedDocsFilter = null;
if (parentDocMapper.hasNestedObjects()) {
-nonNestedDocsFilter = parseContext.bitsetFilter(NonNestedDocsFilter.INSTANCE);
+nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
}
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null, parseContext.autoFilterCachePolicy());

View File

@ -26,6 +26,7 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -36,7 +37,6 @@ import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.ScoreType;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SubSearchContext;
@ -165,7 +165,7 @@ public class HasChildQueryParser implements QueryParser {
BitDocIdSetFilter nonNestedDocsFilter = null;
if (parentDocMapper.hasNestedObjects()) {
-nonNestedDocsFilter = parseContext.bitsetFilter(NonNestedDocsFilter.INSTANCE);
+nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
}
// wrap the query with type query

View File

@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -31,7 +32,6 @@ import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.index.search.child.TopChildrenQuery;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import java.io.IOException;
@ -128,7 +128,7 @@ public class TopChildrenQueryParser implements QueryParser {
BitDocIdSetFilter nonNestedDocsFilter = null;
if (childDocMapper.hasNestedObjects()) {
-nonNestedDocsFilter = parseContext.bitsetFilter(NonNestedDocsFilter.INSTANCE);
+nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
}
innerQuery.setBoost(boost);

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
@ -463,12 +462,10 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException {
-ComplexExplanation ce = new ComplexExplanation();
-ce.setValue(CombineFunction.toFloat(score(docId, subQueryScore.getValue())));
-ce.setMatch(true);
-ce.setDescription("Function for field " + getFieldName() + ":");
-ce.addDetail(func.explainFunction(getDistanceString(ctx, docId), distance.get(docId), scale));
-return ce;
+return Explanation.match(
+CombineFunction.toFloat(score(docId, subQueryScore.getValue())),
+"Function for field " + getFieldName() + ":",
+func.explainFunction(getDistanceString(ctx, docId), distance.get(docId), scale));
}
};
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query.functionscore.exp;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.index.query.functionscore.DecayFunction;
import org.elasticsearch.index.query.functionscore.DecayFunctionParser;
@ -49,10 +48,9 @@ public class ExponentialDecayFunctionParser extends DecayFunctionParser {
@Override
public Explanation explainFunction(String valueExpl, double value, double scale) {
-ComplexExplanation ce = new ComplexExplanation();
-ce.setValue((float) evaluate(value, scale));
-ce.setDescription("exp(- " + valueExpl + " * " + -1 * scale + ")");
-return ce;
+return Explanation.match(
+(float) evaluate(value, scale),
+"exp(- " + valueExpl + " * " + -1 * scale + ")");
}
@Override

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query.functionscore.gauss;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.index.query.functionscore.DecayFunction;
import org.elasticsearch.index.query.functionscore.DecayFunctionParser;
@ -45,10 +44,9 @@ public class GaussDecayFunctionParser extends DecayFunctionParser {
@Override
public Explanation explainFunction(String valueExpl, double value, double scale) {
-ComplexExplanation ce = new ComplexExplanation();
-ce.setValue((float) evaluate(value, scale));
-ce.setDescription("exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + ")");
-return ce;
+return Explanation.match(
+(float) evaluate(value, scale),
+"exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + ")");
}
@Override

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query.functionscore.lin;
-import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.index.query.functionscore.DecayFunction;
import org.elasticsearch.index.query.functionscore.DecayFunctionParser;
@ -49,10 +48,9 @@ public class LinearDecayFunctionParser extends DecayFunctionParser {
@Override
public Explanation explainFunction(String valueExpl, double value, double scale) {
-ComplexExplanation ce = new ComplexExplanation();
-ce.setValue((float) evaluate(value, scale));
-ce.setDescription("max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")");
-return ce;
+return Explanation.match(
+(float) evaluate(value, scale),
+"max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")");
}
@Override

View File

@ -23,6 +23,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
@ -31,7 +32,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -195,7 +195,7 @@ public class NestedInnerQueryParseSupport {
private void setPathLevel() {
ObjectMapper objectMapper = parseContext.nestedScope().getObjectMapper();
if (objectMapper == null) {
-parentFilter = parseContext.bitsetFilter(NonNestedDocsFilter.INSTANCE);
+parentFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
} else {
parentFilter = parseContext.bitsetFilter(objectMapper.nestedTypeFilter());
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.search;
import java.io.IOException;
+import java.util.Objects;
import com.carrotsearch.hppc.DoubleOpenHashSet;
import com.carrotsearch.hppc.LongOpenHashSet;
@ -86,16 +87,19 @@ public abstract class FieldDataTermsFilter extends Filter {
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
-if (obj == null || !(obj instanceof FieldDataTermsFilter)) return false;
+if (super.equals(obj) == false) return false;
FieldDataTermsFilter that = (FieldDataTermsFilter) obj;
if (!fieldData.getFieldNames().indexName().equals(that.fieldData.getFieldNames().indexName())) return false;
-if (this.hashCode() != obj.hashCode()) return false;
return true;
}
@Override
-public abstract int hashCode();
+public int hashCode() {
+int h = super.hashCode();
+h = 31 * h + fieldData.getFieldNames().indexName().hashCode();
+return h;
+}
/**
* Filters on non-numeric fields.
@ -109,11 +113,17 @@ public abstract class FieldDataTermsFilter extends Filter {
this.terms = terms;
}
+@Override
+public boolean equals(Object obj) {
+if (super.equals(obj) == false) {
+return false;
+}
+return Objects.equals(terms, ((BytesFieldDataFilter) obj).terms);
+}
@Override
public int hashCode() {
-int hashcode = fieldData.getFieldNames().indexName().hashCode();
-hashcode += terms != null ? terms.hashCode() : 0;
-return hashcode;
+return 31 * super.hashCode() + Objects.hashCode(terms);
}
@Override
@ -166,11 +176,17 @@ public abstract class FieldDataTermsFilter extends Filter {
this.terms = terms;
}
+@Override
+public boolean equals(Object obj) {
+if (super.equals(obj) == false) {
+return false;
+}
+return Objects.equals(terms, ((LongsFieldDataFilter) obj).terms);
+}
@Override
public int hashCode() {
-int hashcode = fieldData.getFieldNames().indexName().hashCode();
-hashcode += terms != null ? terms.hashCode() : 0;
-return hashcode;
+return 31 * super.hashCode() + Objects.hashCode(terms);
}
@Override
@ -225,11 +241,17 @@ public abstract class FieldDataTermsFilter extends Filter {
this.terms = terms;
}
+@Override
+public boolean equals(Object obj) {
+if (super.equals(obj) == false) {
+return false;
+}
+return Objects.equals(terms, ((DoublesFieldDataFilter) obj).terms);
+}
@Override
public int hashCode() {
-int hashcode = fieldData.getFieldNames().indexName().hashCode();
-hashcode += terms != null ? terms.hashCode() : 0;
-return hashcode;
+return 31 * super.hashCode() + Objects.hashCode(terms);
}
@Override

View File

@ -85,7 +85,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
@Override
public final boolean equals(Object o) {
if (this == o) return true;
-if (!(o instanceof NumericRangeFieldDataFilter)) return false;
+if (super.equals(o) == false) return false;
NumericRangeFieldDataFilter other = (NumericRangeFieldDataFilter) o;
if (!this.indexFieldData.getFieldNames().indexName().equals(other.indexFieldData.getFieldNames().indexName())
@ -101,7 +101,8 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
@Override
public final int hashCode() {
-int h = indexFieldData.getFieldNames().indexName().hashCode();
+int h = super.hashCode();
+h = 31 * h + indexFieldData.getFieldNames().indexName().hashCode();
h ^= (lowerVal != null) ? lowerVal.hashCode() : 550356204;
h = (h << 1) | (h >>> 31); // rotate to distinguish lower from upper
h ^= (upperVal != null) ? upperVal.hashCode() : -1674416163;

View File

@ -40,7 +40,6 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.search.internal.SearchContext;
@ -202,7 +201,7 @@ public class ChildrenConstantScoreQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-return new Explanation(getBoost(), "not implemented yet...");
+return Explanation.match(getBoost(), "not implemented yet...");
}
@Override

View File

@ -36,14 +36,12 @@ import org.apache.lucene.search.Weight;
import org.apache.lucene.search.XFilteredDocIdSetIterator;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.IntArray;
@ -264,7 +262,7 @@ public class ChildrenQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-return new Explanation(getBoost(), "not implemented yet...");
+return Explanation.match(getBoost(), "not implemented yet...");
}
@Override

View File

@ -22,12 +22,20 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.FilteredDocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
@ -166,7 +174,7 @@ public class ParentConstantScoreQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-return new Explanation(getBoost(), "not implemented yet...");
+return Explanation.match(getBoost(), "not implemented yet...");
}
@Override

View File

@ -195,4 +195,24 @@ final class ParentIdsFilter extends Filter {
public String toString(String field) {
return "parentsFilter(type=" + parentTypeBr.utf8ToString() + ")";
}
+@Override
+public boolean equals(Object obj) {
+if (super.equals(obj) == false) {
+return false;
+}
+ParentIdsFilter other = (ParentIdsFilter) obj;
+return parentTypeBr.equals(other.parentTypeBr)
+&& parentIds.equals(other.parentIds)
+&& nonNestedDocsFilter.equals(other.nonNestedDocsFilter);
+}
+@Override
+public int hashCode() {
+int h = super.hashCode();
+h = 31 * h + parentTypeBr.hashCode();
+h = 31 * h + parentIds.hashCode();
+h = 31 * h + nonNestedDocsFilter.hashCode();
+return h;
+}
}

View File

@ -18,17 +18,27 @@
*/
package org.elasticsearch.index.search.child;
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.*;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.LongHash;
@ -232,7 +242,7 @@ public class ParentQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-return new Explanation(getBoost(), "not implemented yet...");
+return Explanation.match(getBoost(), "not implemented yet...");
}
@Override

View File

@ -368,7 +368,7 @@ public class TopChildrenQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-return new Explanation(getBoost(), "not implemented yet...");
+return Explanation.match(getBoost(), "not implemented yet...");
}
}

View File

@ -123,7 +123,7 @@ public class GeoDistanceFilter extends Filter {
@Override
public boolean equals(Object o) {
if (this == o) return true;
-if (o == null || getClass() != o.getClass()) return false;
+if (super.equals(o) == false) return false;
GeoDistanceFilter filter = (GeoDistanceFilter) o;
@ -144,10 +144,10 @@ public class GeoDistanceFilter extends Filter {
@Override
public int hashCode() {
-int result;
+int result = super.hashCode();
long temp;
temp = lat != +0.0d ? Double.doubleToLongBits(lat) : 0L;
-result = (int) (temp ^ (temp >>> 32));
+result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = lon != +0.0d ? Double.doubleToLongBits(lon) : 0L;
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = distance != +0.0d ? Double.doubleToLongBits(distance) : 0L;

View File

@ -133,7 +133,7 @@ public class GeoDistanceRangeFilter extends Filter {
@Override
public boolean equals(Object o) {
if (this == o) return true;
-if (o == null || getClass() != o.getClass()) return false;
+if (super.equals(o) == false) return false;
GeoDistanceRangeFilter filter = (GeoDistanceRangeFilter) o;
@ -155,10 +155,10 @@ public class GeoDistanceRangeFilter extends Filter {
@Override
public int hashCode() {
-int result;
+int result = super.hashCode();
long temp;
temp = lat != +0.0d ? Double.doubleToLongBits(lat) : 0L;
-result = (int) (temp ^ (temp >>> 32));
+result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = lon != +0.0d ? Double.doubleToLongBits(lon) : 0L;
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = inclusiveLowerPoint != +0.0d ? Double.doubleToLongBits(inclusiveLowerPoint) : 0L;

View File

@ -76,6 +76,26 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
return "GeoBoundingBoxFilter(" + indexFieldData.getFieldNames().indexName() + ", " + topLeft + ", " + bottomRight + ")";
}
+@Override
+public boolean equals(Object obj) {
+if (super.equals(obj) == false) {
+return false;
+}
+InMemoryGeoBoundingBoxFilter other = (InMemoryGeoBoundingBoxFilter) obj;
+return fieldName().equalsIgnoreCase(other.fieldName())
+&& topLeft.equals(other.topLeft)
+&& bottomRight.equals(other.bottomRight);
+}
+@Override
+public int hashCode() {
+int h = super.hashCode();
+h = 31 * h + fieldName().hashCode();
+h = 31 * h + topLeft.hashCode();
+h = 31 * h + bottomRight.hashCode();
+return h;
+}
public static class Meridian180GeoBoundingBoxDocSet extends DocValuesDocIdSet {
private final MultiGeoPointValues values;
private final GeoPoint topLeft;

View File

@ -1,84 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.nested;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import java.io.IOException;
/**
* A filter that returns all root (non nested) documents.
*
* Root documents have an unique id, a type and optionally have a _source and other indexed and stored fields.
* A nested document is a sub documents that belong to a root document.
* Nested documents share the unique id and type and optionally the _source with root documents.
*/
public final class NonNestedDocsFilter extends Filter {
public static final NonNestedDocsFilter INSTANCE = new NonNestedDocsFilter();
private final Filter filter = Queries.wrap(Queries.not(nestedFilter()));
private final int hashCode = filter.hashCode();
private NonNestedDocsFilter() {
}
@Override
public Query clone() {
return INSTANCE;
}
@Override
public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
return filter.getDocIdSet(context, acceptDocs);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public boolean equals(Object obj) {
return obj == INSTANCE;
}
@Override
public String toString(String field) {
return "NonNestedDocsFilter";
}
/**
* @return a filter that returns all nested documents.
*/
private static Filter nestedFilter() {
return Queries.wrap(new PrefixQuery(new Term(TypeFieldMapper.NAME, new BytesRef("__"))));
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.shard;
import com.google.common.base.Charsets;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.search.Filter;
@ -50,6 +51,7 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -69,7 +71,14 @@ import org.elasticsearch.index.cache.query.ShardQueryCache;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
-import org.elasticsearch.index.engine.*;
+import org.elasticsearch.index.engine.CommitStats;
+import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.engine.EngineClosedException;
+import org.elasticsearch.index.engine.EngineConfig;
+import org.elasticsearch.index.engine.EngineException;
+import org.elasticsearch.index.engine.EngineFactory;
+import org.elasticsearch.index.engine.RefreshFailedEngineException;
+import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.ShardFieldData;
@ -78,7 +87,12 @@ import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.get.ShardGetService;
import org.elasticsearch.index.indexing.IndexingStats;
import org.elasticsearch.index.indexing.ShardIndexingService;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.MapperAnalyzer;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.merge.policy.MergePolicyProvider;
@ -88,7 +102,6 @@ import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.search.stats.ShardSearchService;
import org.elasticsearch.index.settings.IndexSettingsService;
@ -116,7 +129,6 @@ import java.io.IOException;
import java.io.PrintStream;
import java.nio.channels.ClosedByInterruptException;
import java.util.Map;
-import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
@ -545,7 +557,7 @@ public class IndexShard extends AbstractIndexShardComponent {
}
Filter aliasFilter = indexAliasesService.aliasFilter(filteringAliases);
-BitDocIdSetFilter parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE) : null;
+BitDocIdSetFilter parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()) : null;
return new Engine.DeleteByQuery(query, source, filteringAliases, aliasFilter, parentFilter, origin, startTime, types);
}

View File

@ -70,14 +70,12 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.query.ParsedQuery;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.percolator.QueryCollector.Count;
@ -455,7 +453,7 @@ public class PercolatorService extends AbstractComponent {
for (Map.Entry<BytesRef, Query> entry : context.percolateQueries().entrySet()) {
try {
if (isNested) {
-Lucene.exists(context.docSearcher(), entry.getValue(), NonNestedDocsFilter.INSTANCE, collector);
+Lucene.exists(context.docSearcher(), entry.getValue(), Queries.newNonNestedFilter(), collector);
} else {
Lucene.exists(context.docSearcher(), entry.getValue(), collector);
}
@ -555,7 +553,7 @@ public class PercolatorService extends AbstractComponent {
}
try {
if (isNested) {
-Lucene.exists(context.docSearcher(), entry.getValue(), NonNestedDocsFilter.INSTANCE, collector);
+Lucene.exists(context.docSearcher(), entry.getValue(), Queries.newNonNestedFilter(), collector);
} else {
Lucene.exists(context.docSearcher(), entry.getValue(), collector);
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.percolator;
import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
@ -30,13 +31,13 @@ import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.BucketCollector;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
@ -180,7 +181,7 @@ abstract class QueryCollector extends SimpleCollector {
}
if (isNestedDoc) {
-Lucene.exists(searcher, query, NonNestedDocsFilter.INSTANCE, collector);
+Lucene.exists(searcher, query, Queries.newNonNestedFilter(), collector);
} else {
Lucene.exists(searcher, query, collector);
}
@ -239,7 +240,7 @@ abstract class QueryCollector extends SimpleCollector {
// run the query
try {
if (isNestedDoc) {
-Lucene.exists(searcher, query, NonNestedDocsFilter.INSTANCE, collector);
+Lucene.exists(searcher, query, Queries.newNonNestedFilter(), collector);
} else {
Lucene.exists(searcher, query, collector);
}
@ -311,7 +312,7 @@ abstract class QueryCollector extends SimpleCollector {
context.hitContext().cache().clear();
}
if (isNestedDoc) {
-Lucene.exists(searcher, query, NonNestedDocsFilter.INSTANCE, collector);
+Lucene.exists(searcher, query, Queries.newNonNestedFilter(), collector);
} else {
Lucene.exists(searcher, query, collector);
}
@ -372,7 +373,7 @@ abstract class QueryCollector extends SimpleCollector {
// run the query
try {
if (isNestedDoc) {
-Lucene.exists(searcher, query, NonNestedDocsFilter.INSTANCE, collector);
+Lucene.exists(searcher, query, Queries.newNonNestedFilter(), collector);
} else {
Lucene.exists(searcher, query, collector);
}

View File

@ -22,15 +22,14 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilterCachingPolicy;
+import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -93,7 +92,7 @@ public class NestedAggregator extends SingleBucketAggregator {
// aggs execution
Filter parentFilterNotCached = findClosestNestedPath(parent());
if (parentFilterNotCached == null) {
-parentFilterNotCached = NonNestedDocsFilter.INSTANCE;
+parentFilterNotCached = Queries.newNonNestedFilter();
}
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);

View File

@ -27,9 +27,9 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
@ -55,7 +55,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
public ReverseNestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper, AggregationContext aggregationContext, Aggregator parent, Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, metaData);
if (objectMapper == null) {
-parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE);
+parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
} else {
parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(objectMapper.nestedTypeFilter());
}

View File

@ -34,17 +34,21 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fieldvisitor.*;
import org.elasticsearch.index.fieldvisitor.AllFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.fieldvisitor.JustUidFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.UidAndSourceFieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
@ -62,7 +66,6 @@ import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@ -210,7 +213,7 @@ public class FetchPhase implements SearchPhase {
private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException {
if (context.mapperService().hasNested()) {
BitDocIdSet nonNested = context.bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE).getDocIdSet(subReaderContext);
BitDocIdSet nonNested = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()).getDocIdSet(subReaderContext);
BitSet bits = nonNested.bits();
if (!bits.get(subDocId)) {
return bits.nextSetBit(subDocId);
@ -390,7 +393,7 @@ public class FetchPhase implements SearchPhase {
parentFilter = nestedParentObjectMapper.nestedTypeFilter();
} else {
field = nestedObjectMapper.fullPath();
parentFilter = NonNestedDocsFilter.INSTANCE;
parentFilter = Queries.newNonNestedFilter();
}
BitDocIdSet parentBitSet = context.bitsetFilterCache().getBitDocIdSetFilter(parentFilter).getDocIdSet(subReaderContext);
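
The lookup above relies on Lucene's block indexing: a nested block is stored as the child documents immediately followed by their root document, so the root of a nested doc is the next set bit in the root-documents bitset at or after the child's doc id. A hand-built illustration:

import org.apache.lucene.util.FixedBitSet;

public class RootDocLookup {
    public static void main(String[] args) {
        // Two blocks: nested docs 0..2 with root 3, nested docs 4..6 with root 7.
        FixedBitSet rootDocs = new FixedBitSet(8);
        rootDocs.set(3);
        rootDocs.set(7);

        int subDocId = 5; // a nested doc from the second block
        int rootDocId = rootDocs.get(subDocId) ? subDocId : rootDocs.nextSetBit(subDocId);
        System.out.println(rootDocId); // 7
    }
}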

View File

@ -36,7 +36,6 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
@ -50,7 +49,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.FilteredSearchContext;
@ -126,7 +124,7 @@ public final class InnerHitsContext {
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
Filter rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = NonNestedDocsFilter.INSTANCE;
rawParentFilter = Queries.newNonNestedFilter();
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.search.rescore;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
@ -148,35 +147,35 @@ public final class QueryRescorer implements Rescorer {
ContextIndexSearcher searcher = context.searcher();
if (sourceExplanation == null) {
// this should not happen but just in case
return new ComplexExplanation(false, 0.0f, "nothing matched");
return Explanation.noMatch("nothing matched");
}
// TODO: this isn't right, is it? We are incorrectly pretending all first-pass hits were rescored. If the requested docID was
// beyond the top rescoreContext.window() in the first-pass hits, we don't rescore it now?
Explanation rescoreExplain = searcher.explain(rescore.query(), topLevelDocId);
float primaryWeight = rescore.queryWeight();
ComplexExplanation prim = new ComplexExplanation(sourceExplanation.isMatch(),
sourceExplanation.getValue() * primaryWeight,
"product of:");
prim.addDetail(sourceExplanation);
prim.addDetail(new Explanation(primaryWeight, "primaryWeight"));
Explanation prim;
if (sourceExplanation.isMatch()) {
prim = Explanation.match(
sourceExplanation.getValue() * primaryWeight,
"product of:", sourceExplanation, Explanation.match(primaryWeight, "primaryWeight"));
} else {
prim = Explanation.noMatch("First pass did not match", sourceExplanation);
}
// NOTE: we don't use Lucene's Rescorer.explain because we want to insert our own description of which ScoreMode was used. Maybe
// we should add QueryRescorer.explainCombine to Lucene?
if (rescoreExplain != null && rescoreExplain.isMatch()) {
float secondaryWeight = rescore.rescoreQueryWeight();
ComplexExplanation sec = new ComplexExplanation(rescoreExplain.isMatch(),
Explanation sec = Explanation.match(
rescoreExplain.getValue() * secondaryWeight,
"product of:");
sec.addDetail(rescoreExplain);
sec.addDetail(new Explanation(secondaryWeight, "secondaryWeight"));
"product of:",
rescoreExplain, Explanation.match(secondaryWeight, "secondaryWeight"));
ScoreMode scoreMode = rescore.scoreMode();
ComplexExplanation calcExpl = new ComplexExplanation();
calcExpl.setDescription(scoreMode + " of:");
calcExpl.addDetail(prim);
calcExpl.setMatch(prim.isMatch());
calcExpl.addDetail(sec);
calcExpl.setValue(scoreMode.combine(prim.getValue(), sec.getValue()));
return calcExpl;
return Explanation.match(
scoreMode.combine(prim.getValue(), sec.getValue()),
scoreMode + " of:",
prim, sec);
} else {
return prim;
}
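
The rewritten branches build the same tree as before, but through the immutable match/noMatch factories, where sub-explanations are passed as constructor arguments instead of being added after the fact. A runnable sketch of the combine shape, with made-up scores and weights and "sum" standing in for the configured ScoreMode:

import org.apache.lucene.search.Explanation;

public class RescoreExplainExample {
    public static void main(String[] args) {
        // made-up first-pass and rescore-query explanations
        Explanation source = Explanation.match(1.5f, "weight(field:term)");
        Explanation rescore = Explanation.match(4.0f, "weight(rescore_query)");
        float primaryWeight = 1.0f;
        float secondaryWeight = 2.0f;

        Explanation prim = Explanation.match(
                source.getValue() * primaryWeight, "product of:",
                source, Explanation.match(primaryWeight, "primaryWeight"));
        Explanation sec = Explanation.match(
                rescore.getValue() * secondaryWeight, "product of:",
                rescore, Explanation.match(secondaryWeight, "secondaryWeight"));

        // "sum" stands in for the configured ScoreMode combine
        Explanation combined = Explanation.match(
                prim.getValue() + sec.getValue(), "sum of:", prim, sec);
        System.out.print(combined); // 9.5 = sum of: ... rendered as an indented tree
    }
}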

View File

@ -21,7 +21,10 @@ package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.*;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
@ -30,14 +33,18 @@ import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.internal.SearchContext;
@ -157,7 +164,7 @@ public class GeoDistanceSortParser implements SortParser {
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE);
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
Filter innerDocumentsFilter;
if (nestedHelper.filterFound()) {
innerDocumentsFilter = context.filterCache().cache(nestedHelper.getInnerFilter(), null, context.queryParserService().autoFilterCachePolicy());

View File

@ -27,16 +27,20 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchParseException;
@ -131,7 +135,7 @@ public class ScriptSortParser implements SortParser {
// If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource`
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE);
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
Filter innerDocumentsFilter;
if (nestedHelper.filterFound()) {
innerDocumentsFilter = context.filterCache().cache(nestedHelper.getInnerFilter(), null, context.queryParserService().autoFilterCachePolicy());

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.sort;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
@ -28,6 +29,7 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
@ -36,7 +38,6 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
@ -252,7 +253,7 @@ public class SortParseElement implements SearchParseElement {
}
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(NonNestedDocsFilter.INSTANCE);
BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
Filter innerDocumentsFilter;
if (nestedHelper.filterFound()) {
innerDocumentsFilter = context.filterCache().cache(nestedHelper.getInnerFilter(), null, context.queryParserService().autoFilterCachePolicy());

View File

@ -19,7 +19,6 @@
package org.elasticsearch.explain;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.explain.ExplainResponse;
@ -275,7 +274,7 @@ public class ExplainActionTests extends ElasticsearchIntegrationTest {
@Test
public void streamExplainTest() throws Exception {
Explanation exp = new Explanation((float) 2.0, "some explanation");
Explanation exp = Explanation.match(2f, "some explanation");
// write
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
@ -289,8 +288,7 @@ public class ExplainActionTests extends ElasticsearchIntegrationTest {
Explanation result = Lucene.readExplanation(esBuffer);
assertThat(exp.toString(),equalTo(result.toString()));
exp = new ComplexExplanation(true, 2.0f, "some explanation");
exp.addDetail(new Explanation(2.0f,"another explanation"));
exp = Explanation.match(2.0f, "some explanation", Explanation.match(2.0f,"another explanation"));
// write complex
outBuffer = new ByteArrayOutputStream();
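
With Explanation now immutable, a (de)serializer like the one this test exercises reduces to walking the getters and rebuilding through the factories. A minimal sketch of that round trip without the Elasticsearch stream classes:

import org.apache.lucene.search.Explanation;

public class ExplanationRoundTrip {
    // Rebuilds an explanation from its own accessors, the way a stream
    // (de)serializer would after reading the match flag, description and details.
    static Explanation copyOf(Explanation exp) {
        Explanation[] details = exp.getDetails();
        Explanation[] copies = new Explanation[details.length];
        for (int i = 0; i < details.length; i++) {
            copies[i] = copyOf(details[i]);
        }
        return exp.isMatch()
                ? Explanation.match(exp.getValue(), exp.getDescription(), copies)
                : Explanation.noMatch(exp.getDescription(), copies);
    }

    public static void main(String[] args) {
        Explanation exp = Explanation.match(2.0f, "some explanation",
                Explanation.match(2.0f, "another explanation"));
        System.out.println(copyOf(exp).toString().equals(exp.toString())); // true
    }
}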

View File

@ -43,7 +43,6 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.engine.Engine;
@ -53,7 +52,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
@ -95,7 +93,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(Queries.wrap(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(NonNestedDocsFilter.INSTANCE));
Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
}

View File

@ -64,7 +64,6 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
@ -114,7 +113,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
int minChildren = random().nextInt(10);
int maxChildren = scaledRandomIntBetween(minChildren, 10);
Query query = new ChildrenQuery(parentChildIndexFieldData, "parent", "child", parentFilter, childQuery, scoreType, minChildren,
maxChildren, 12, wrapWithBitSetFilter(NonNestedDocsFilter.INSTANCE));
maxChildren, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
}

View File

@ -24,9 +24,9 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.SearchContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -56,7 +56,7 @@ public class TopChildrenQueryTests extends AbstractChildTests {
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
Query query = new TopChildrenQuery(parentChildIndexFieldData, childQuery, "child", "parent", scoreType, 1, 1, wrapWithBitSetFilter(NonNestedDocsFilter.INSTANCE));
Query query = new TopChildrenQuery(parentChildIndexFieldData, childQuery, "child", "parent", scoreType, 1, 1, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
}

View File

@ -532,7 +532,7 @@ public class SimpleNestedTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
Explanation explanation = searchResponse.getHits().hits()[0].explanation();
assertThat(explanation.getValue(), equalTo(2f));
assertThat(explanation.toString(), equalTo("2.0 = (MATCH) sum of:\n 2.0 = (MATCH) Score based on child doc range from 0 to 1\n 0.0 = match on required clause, product of:\n 0.0 = # clause\n 0.0 = (MATCH) Match on id 2\n"));
assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on child doc range from 0 to 1\n"));
// TODO: Enable when changes from BlockJoinQuery#explain are added to Lucene (Most likely version 4.2)
// assertThat(explanation.getDetails().length, equalTo(2));
// assertThat(explanation.getDetails()[0].getValue(), equalTo(1f));
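
The assertion is loosened to startsWith because the refactored Explanation rendering no longer prints a "(MATCH)" marker in front of matching nodes. A quick way to see the new format:

import org.apache.lucene.search.Explanation;

public class ExplanationRenderExample {
    public static void main(String[] args) {
        Explanation sum = Explanation.match(2.0f, "sum of:",
                Explanation.match(2.0f, "Score based on child doc range from 0 to 1"));
        // Each detail prints on its own indented line under its parent;
        // matching nodes no longer carry a "(MATCH)" prefix.
        System.out.print(sum);
    }
}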

View File

@ -813,7 +813,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
// Can't explain a nested hit with the main query, since the two are in different scopes, and the nested doc may not even have matched the main query
// If top_hits would have a query option then we can explain that query
Explanation explanation = searchHit.explanation();
assertThat(explanation.toString(), containsString("Not a match"));
assertFalse(explanation.isMatch());
// Returns the version of the root document. Nested docs don't have a separate version
long version = searchHit.version();
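
Checking isMatch() is sturdier than matching the rendered string, since the exact wording of a non-match explanation is not part of the API contract. For instance:

import org.apache.lucene.search.Explanation;

public class NoMatchExample {
    public static void main(String[] args) {
        Explanation miss = Explanation.noMatch("Not a match");
        System.out.println(miss.isMatch()); // false, however toString happens to render it
    }
}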

View File

@ -35,10 +35,10 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BucketCollector;
@ -133,7 +133,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeTest {
// We exclude the root doc with uid type#2; this will trigger the bug if we don't reset the root doc when we process a new segment, because
// root doc type#3 and root doc type#1 have the same segment docid
BooleanQuery bq = new BooleanQuery();
bq.add(NonNestedDocsFilter.INSTANCE, Occur.MUST);
bq.add(Queries.newNonNestedFilter(), Occur.MUST);
bq.add(new TermQuery(new Term(UidFieldMapper.NAME, "type#2")), Occur.MUST_NOT);
searcher.search(new ConstantScoreQuery(bq), collector);
collector.postCollection();
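
Note that the non-nested filter is added directly as a MUST clause: in Lucene 5.x, Filter extends Query, so it can participate in a BooleanQuery, and wrapping the result in ConstantScoreQuery keeps scoring out of the picture. A standalone sketch with illustrative field names:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;

public class FilterClauseExample {
    public static void main(String[] args) {
        BooleanQuery bq = new BooleanQuery();
        // a Filter is a Query in Lucene 5.x, so it can be a clause directly
        bq.add(new QueryWrapperFilter(new TermQuery(new Term("type", "root"))), Occur.MUST);
        bq.add(new TermQuery(new Term("_uid", "type#2")), Occur.MUST_NOT);
        Query query = new ConstantScoreQuery(bq);
        System.out.println(query); // prints the boolean structure wrapped in a constant-score query
    }
}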

View File

@ -852,7 +852,7 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.get();
assertThat(explainResponse.isExists(), equalTo(true));
assertThat(explainResponse.getExplanation().toString(), equalTo("1.0 = (MATCH) sum of:\n 1.0 = not implemented yet...\n 0.0 = match on required clause, product of:\n 0.0 = # clause\n 0.0 = (MATCH) Match on id 0\n"));
assertThat(explainResponse.getExplanation().toString(), equalTo("1.0 = sum of:\n 1.0 = not implemented yet...\n 0.0 = match on required clause, product of:\n 0.0 = # clause\n 0.0 = Match on id 0\n"));
}
List<IndexRequestBuilder> createDocBuilders() {

View File

@ -108,11 +108,8 @@ public class ExplainableScriptTests extends ElasticsearchIntegrationTest {
@Override
public Explanation explain(Explanation subQueryScore) throws IOException {
Explanation exp = new Explanation((float) (runAsDouble()), "This script returned " + runAsDouble());
Explanation scoreExp = new Explanation(subQueryScore.getValue(), "_score: ");
scoreExp.addDetail(subQueryScore);
exp.addDetail(scoreExp);
return exp;
Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore);
return Explanation.match((float) (runAsDouble()), "This script returned " + runAsDouble(), scoreExp);
}
@Override

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.functionscore;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchResponse;
@ -33,6 +32,8 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.junit.Test;
@ -43,8 +44,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import static org.hamcrest.Matchers.equalTo;
/**
@ -142,9 +141,7 @@ public class FunctionScorePluginTests extends ElasticsearchIntegrationTest {
@Override
public Explanation explainFunction(String distanceString, double distanceVal, double scale) {
ComplexExplanation ce = new ComplexExplanation();
ce.setDescription("" + distanceVal);
return ce;
return Explanation.match((float) distanceVal, "" + distanceVal);
}
@Override

View File

@ -179,7 +179,7 @@ public class FunctionScoreTests extends ElasticsearchIntegrationTest {
).explain(true))).actionGet();
assertThat(responseWithWeights.getHits().getAt(0).getExplanation().toString(),
equalTo("6.0 = (MATCH) function score, product of:\n 1.0 = (MATCH) ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = (MATCH) Math.min of\n 6.0 = (MATCH) function score, score mode [multiply]\n 1.0 = (MATCH) function score, product of:\n 1.0 = match filter: QueryWrapperFilter(*:*)\n 1.0 = (MATCH) Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = (MATCH) function score, product of:\n 1.0 = match filter: QueryWrapperFilter(*:*)\n 2.0 = (MATCH) product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = (MATCH) function score, product of:\n 1.0 = match filter: QueryWrapperFilter(*:*)\n 3.0 = (MATCH) product of:\n 1.0 = script score function, computed with script:\"_index['text_field']['value'].tf()\n 1.0 = _score: \n 1.0 = (MATCH) ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n")
equalTo("6.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = Math.min of\n 6.0 = function score, score mode [multiply]\n 1.0 = function score, product of:\n 1.0 = match filter: QueryWrapperFilter(*:*)\n 1.0 = Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = function score, product of:\n 1.0 = match filter: QueryWrapperFilter(*:*)\n 2.0 = product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = function score, product of:\n 1.0 = match filter: QueryWrapperFilter(*:*)\n 3.0 = product of:\n 1.0 = script score function, computed with script:\"_index['text_field']['value'].tf()\n 1.0 = _score: \n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n")
);
responseWithWeights = client().search(
searchRequest().source(
@ -188,7 +188,7 @@ public class FunctionScoreTests extends ElasticsearchIntegrationTest {
.add(weightFactorFunction(4.0f))
).explain(true))).actionGet();
assertThat(responseWithWeights.getHits().getAt(0).getExplanation().toString(),
equalTo("4.0 = (MATCH) function score, product of:\n 1.0 = (MATCH) ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = (MATCH) Math.min of\n 4.0 = (MATCH) product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n")
equalTo("4.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = Math.min of\n 4.0 = product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n 1.0 = queryBoost\n")
);
}

View File

@ -161,7 +161,7 @@ public class InnerHitsTests extends ElasticsearchIntegrationTest {
assertThat(innerHits.getTotalHits(), equalTo(2l));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("(MATCH) weight(comments.message:fox in"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(comments.message:fox in"));
assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat"));
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("eat"));
}
@ -338,7 +338,7 @@ public class InnerHitsTests extends ElasticsearchIntegrationTest {
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("(MATCH) weight(message:fox"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(message:fox"));
assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("eat"));
}