lucene4: switched setNextReader from IndexReader to AtomicReaderContext

Igor Motov 2012-10-30 22:37:43 -04:00 committed by Shay Banon
parent 25d03a6a7d
commit 93906903b6
7 changed files with 37 additions and 33 deletions
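For orientation, a minimal sketch (not part of the commit; the collector name is made up) of the Lucene 3.x to 4.x Collector migration performed below: the IndexReader and docBase parameters of setNextReader are replaced by a single per-segment AtomicReaderContext, which exposes the segment reader via reader() and the document base via its public docBase field.

// Illustrative sketch only -- "CountingCollector" is a hypothetical class,
// not something introduced by this commit.
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;

import java.io.IOException;

public class CountingCollector extends Collector {

    private int docBase;
    private int count;

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        // scores are not needed for counting
    }

    @Override
    public void setNextReader(AtomicReaderContext context) throws IOException {
        // Lucene 3.x passed (IndexReader reader, int docBase) here;
        // in 4.x both live on the per-segment context.
        this.docBase = context.docBase;   // replaces the old docBase parameter
        // context.reader() replaces the old IndexReader parameter
    }

    @Override
    public void collect(int doc) throws IOException {
        // doc is segment-relative; docBase + doc is the global doc id, as before
        count++;
    }

    @Override
    public boolean acceptsDocsOutOfOrder() {
        return true;
    }

    public int count() {
        return count;
    }
}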

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.common.lucene.search;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Scorer;
@@ -57,9 +57,9 @@ public class FilteredCollector extends Collector {
     }
     @Override
-    public void setNextReader(IndexReader reader, int docBase) throws IOException {
-        collector.setNextReader(reader, docBase);
-        docSet = DocSets.convert(reader, filter.getDocIdSet(reader));
+    public void setNextReader(AtomicReaderContext context) throws IOException {
+        collector.setNextReader(context);
+        docSet = DocSets.convert(context.reader(), filter.getDocIdSet(context));
     }
     @Override

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.common.lucene.search.function;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Explanation;
 /**
@@ -39,7 +39,7 @@ public class BoostScoreFunction implements ScoreFunction {
     }
     @Override
-    public void setNextReader(IndexReader reader) {
+    public void setNextReader(AtomicReaderContext context) {
         // nothing to do here...
     }

View File

@@ -19,9 +19,11 @@
 package org.elasticsearch.common.lucene.search.function;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.common.lucene.docset.DocSet;
 import org.elasticsearch.common.lucene.docset.DocSets;
@@ -106,15 +108,15 @@ public class FiltersFunctionScoreQuery extends Query {
     }
     @Override
-    public Weight createWeight(Searcher searcher) throws IOException {
+    public Weight createWeight(IndexSearcher searcher) throws IOException {
         return new CustomBoostFactorWeight(searcher);
     }
     class CustomBoostFactorWeight extends Weight {
-        Searcher searcher;
+        IndexSearcher searcher;
         Weight subQueryWeight;
-        public CustomBoostFactorWeight(Searcher searcher) throws IOException {
+        public CustomBoostFactorWeight(IndexSearcher searcher) throws IOException {
             this.searcher = searcher;
             this.subQueryWeight = subQuery.weight(searcher);
         }
@@ -141,31 +143,31 @@ public class FiltersFunctionScoreQuery extends Query {
         }
         @Override
-        public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder, boolean topScorer) throws IOException {
-            Scorer subQueryScorer = subQueryWeight.scorer(reader, scoreDocsInOrder, false);
+        public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
+            Scorer subQueryScorer = subQueryWeight.scorer(context, scoreDocsInOrder, false, acceptDocs);
             if (subQueryScorer == null) {
                 return null;
             }
             for (int i = 0; i < filterFunctions.length; i++) {
                 FilterFunction filterFunction = filterFunctions[i];
-                filterFunction.function.setNextReader(reader);
-                docSets[i] = DocSets.convert(reader, filterFunction.filter.getDocIdSet(reader));
+                filterFunction.function.setNextReader(context);
+                docSets[i] = DocSets.convert(context.reader(), filterFunction.filter.getDocIdSet(context, acceptDocs));
             }
             return new CustomBoostFactorScorer(getSimilarity(searcher), this, subQueryScorer, scoreMode, filterFunctions, maxBoost, docSets);
         }
         @Override
-        public Explanation explain(IndexReader reader, int doc) throws IOException {
-            Explanation subQueryExpl = subQueryWeight.explain(reader, doc);
+        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
+            Explanation subQueryExpl = subQueryWeight.explain(context, doc);
             if (!subQueryExpl.isMatch()) {
                 return subQueryExpl;
             }
             if (scoreMode == ScoreMode.First) {
                 for (FilterFunction filterFunction : filterFunctions) {
-                    DocSet docSet = DocSets.convert(reader, filterFunction.filter.getDocIdSet(reader));
+                    DocSet docSet = DocSets.convert(context.reader(), filterFunction.filter.getDocIdSet(context));
                     if (docSet.get(doc)) {
-                        filterFunction.function.setNextReader(reader);
+                        filterFunction.function.setNextReader(context);
                         Explanation functionExplanation = filterFunction.function.explainFactor(doc);
                         float sc = getValue() * subQueryExpl.getValue() * functionExplanation.getValue();
                         Explanation filterExplanation = new ComplexExplanation(true, sc, "custom score, product of:");
@@ -189,9 +191,9 @@ public class FiltersFunctionScoreQuery extends Query {
             float min = Float.POSITIVE_INFINITY;
             ArrayList<Explanation> filtersExplanations = new ArrayList<Explanation>();
             for (FilterFunction filterFunction : filterFunctions) {
-                DocSet docSet = DocSets.convert(reader, filterFunction.filter.getDocIdSet(reader));
+                DocSet docSet = DocSets.convert(context.reader(), filterFunction.filter.getDocIdSet(context));
                 if (docSet.get(doc)) {
-                    filterFunction.function.setNextReader(reader);
+                    filterFunction.function.setNextReader(context);
                     Explanation functionExplanation = filterFunction.function.explainFactor(doc);
                     float factor = functionExplanation.getValue();
                     count++;

View File

@@ -19,9 +19,11 @@
 package org.elasticsearch.common.lucene.search.function;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ToStringUtils;
 import java.io.IOException;
@@ -63,17 +65,17 @@ public class FunctionScoreQuery extends Query {
     }
     @Override
-    public Weight createWeight(Searcher searcher) throws IOException {
+    public Weight createWeight(IndexSearcher searcher) throws IOException {
         return new CustomBoostFactorWeight(searcher);
     }
     class CustomBoostFactorWeight extends Weight {
-        Searcher searcher;
+        IndexSearcher searcher;
        Weight subQueryWeight;
-        public CustomBoostFactorWeight(Searcher searcher) throws IOException {
+        public CustomBoostFactorWeight(IndexSearcher searcher) throws IOException {
             this.searcher = searcher;
-            this.subQueryWeight = subQuery.weight(searcher);
+            this.subQueryWeight = subQuery.createWeight(searcher);
         }
         public Query getQuery() {
@@ -98,23 +100,23 @@ public class FunctionScoreQuery extends Query {
         }
         @Override
-        public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder, boolean topScorer) throws IOException {
-            Scorer subQueryScorer = subQueryWeight.scorer(reader, scoreDocsInOrder, false);
+        public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
+            Scorer subQueryScorer = subQueryWeight.scorer(context, scoreDocsInOrder, false, acceptDocs);
             if (subQueryScorer == null) {
                 return null;
             }
-            function.setNextReader(reader);
+            function.setNextReader(context);
             return new CustomBoostFactorScorer(getSimilarity(searcher), this, subQueryScorer, function);
         }
         @Override
-        public Explanation explain(IndexReader reader, int doc) throws IOException {
-            Explanation subQueryExpl = subQueryWeight.explain(reader, doc);
+        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
+            Explanation subQueryExpl = subQueryWeight.explain(context, doc);
             if (!subQueryExpl.isMatch()) {
                 return subQueryExpl;
             }
-            function.setNextReader(reader);
+            function.setNextReader(context);
             Explanation functionExplanation = function.explainScore(doc, subQueryExpl);
             float sc = getValue() * functionExplanation.getValue();
             Explanation res = new ComplexExplanation(true, sc, "custom score, product of:");
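The two weights above follow the same pattern as the collector change: in Lucene 4.x, Weight.scorer() and Weight.explain() operate on a per-segment AtomicReaderContext, scorer() additionally receives an acceptDocs Bits (typically the segment's live docs) that takes over deleted-document filtering, and Query.weight(Searcher) gives way to Query.createWeight(IndexSearcher). As a rough illustration only, a delegating skeleton written against the final Lucene 4.0 Weight contract might look like the following (the class name is made up, and unlike the commit's transitional code it does not use the 3.x getValue()/getSimilarity() methods):

// Hypothetical skeleton, not part of this commit.
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;

import java.io.IOException;

class DelegatingWeight extends Weight {

    private final Query query;
    private final Weight delegate;

    DelegatingWeight(Query query, IndexSearcher searcher) throws IOException {
        this.query = query;
        // Lucene 3.x: query.weight(searcher); Lucene 4.x: createWeight(IndexSearcher)
        this.delegate = query.createWeight(searcher);
    }

    @Override
    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
                         boolean topScorer, Bits acceptDocs) throws IOException {
        // acceptDocs (usually the segment's live docs) replaces the 3.x
        // reader-level handling of deleted documents
        return delegate.scorer(context, scoreDocsInOrder, topScorer, acceptDocs);
    }

    @Override
    public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
        return delegate.explain(context, doc);
    }

    @Override
    public Query getQuery() {
        return query;
    }

    @Override
    public float getValueForNormalization() throws IOException {
        return delegate.getValueForNormalization();
    }

    @Override
    public void normalize(float norm, float topLevelBoost) {
        delegate.normalize(norm, topLevelBoost);
    }
}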

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.common.lucene.search.function;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Explanation;
 /**
@@ -27,7 +27,7 @@ import org.apache.lucene.search.Explanation;
  */
 public interface ScoreFunction {
-    void setNextReader(IndexReader reader);
+    void setNextReader(AtomicReaderContext context);
     float score(int docId, float subQueryScore);

View File

@@ -166,7 +166,7 @@ public class ScriptFilterParser implements FilterParser {
         @Override
         public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-            searchScript.setNextReader(context.reader());
+            searchScript.setNextReader(context);
             // LUCENE 4 UPGRADE: we can simply wrap this here since it is not cacheable and if we are not top level we will get a null passed anyway
             return BitsFilteredDocIdSet.wrap(new ScriptDocSet(context.reader(), searchScript), acceptDocs);
         }

View File

@@ -67,7 +67,7 @@ public class ScriptFieldsFetchSubPhase implements FetchSubPhase {
     @Override
     public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
         for (ScriptFieldsContext.ScriptField scriptField : context.scriptFields().fields()) {
-            scriptField.script().setNextReader(hitContext.reader());
+            scriptField.script().setNextReader(hitContext.readerContext());
             scriptField.script().setNextDocId(hitContext.docId());
             Object value;