mirror of https://github.com/apache/lucene.git
SOLR-8542: master-to-branch_6x backport changes (Michael Nilsson, Naveen Santhapuri, Christine Poerschke)
* removed 'boost' arg from LTRScoringQuery.createWeight signature
* classes extending Weight now implement normalize and getValueForNormalization
* FieldLengthFeatureScorer tweaks
This commit is contained in:
parent 3e2657214e
commit 9e8dd854cd
@@ -187,7 +187,7 @@ public class LTRScoringQuery extends Query {
   }
 
   @Override
-  public ModelWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost)
+  public ModelWeight createWeight(IndexSearcher searcher, boolean needsScores)
       throws IOException {
     final Collection<Feature> modelFeatures = ltrScoringModel.getFeatures();
     final Collection<Feature> allFeatures = ltrScoringModel.getAllFeatures();
@@ -458,6 +458,18 @@ public class LTRScoringQuery extends Query {
       }
     }
 
+    @Override
+    public float getValueForNormalization() throws IOException {
+      return 1f;
+    }
+
+    @Override
+    public void normalize(float norm, float boost) {
+      for (final Feature.FeatureWeight featureWeight : extractedFeatureWeights) {
+        featureWeight.normalize(norm, boost);
+      }
+    }
+
     @Override
     public ModelScorer scorer(LeafReaderContext context) throws IOException {
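The two hunks above adapt LTRScoringQuery to the Lucene 6.x Weight API: createWeight loses the float boost argument, and ModelWeight now takes part in query normalization. Below is a minimal, hypothetical sketch (not part of this patch; ExampleQuery and ExampleWeight are invented names) of that 6.x contract, showing the two-argument createWeight and a Weight that implements getValueForNormalization() and normalize(float, float):

import java.io.IOException;
import java.util.Set;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

// Hypothetical example class, not part of the patch.
public class ExampleQuery extends Query {

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    // 6.x signature: no "float boost" third argument (that argument exists on master).
    return new ExampleWeight(this);
  }

  static class ExampleWeight extends Weight {

    ExampleWeight(Query query) {
      super(query);
    }

    @Override
    public void extractTerms(Set<Term> terms) {
      // nothing to extract in this sketch
    }

    @Override
    public float getValueForNormalization() throws IOException {
      // Weights that do not take part in query normalization return 1f,
      // mirroring the no-op feature weights in this patch.
      return 1f;
    }

    @Override
    public void normalize(float norm, float boost) {
      // no op here; a wrapping weight would delegate, e.g. inner.normalize(norm, boost)
    }

    @Override
    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
      return Explanation.match(0f, "example weight (constant)");
    }

    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
      return null; // matches no documents in this sketch
    }
  }

  @Override
  public String toString(String field) {
    return "ExampleQuery";
  }

  @Override
  public boolean equals(Object other) {
    return other != null && getClass() == other.getClass();
  }

  @Override
  public int hashCode() {
    return getClass().hashCode();
  }
}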
@@ -111,6 +111,16 @@ public class FieldLengthFeature extends Feature {
       super(FieldLengthFeature.this, searcher, request, originalQuery, efi);
     }
 
+    @Override
+    public float getValueForNormalization() throws IOException {
+      return 1f;
+    }
+
+    @Override
+    public void normalize(float norm, float boost) {
+      // no op
+    }
+
     @Override
     public FeatureScorer scorer(LeafReaderContext context) throws IOException {
       NumericDocValues norms = context.reader().getNormValues(field);
@@ -127,7 +137,7 @@ public class FieldLengthFeature extends Feature {
 
     public FieldLengthFeatureScorer(FeatureWeight weight,
         NumericDocValues norms) throws IOException {
-      super(weight, norms);
+      super(weight, DocIdSetIterator.all(DocIdSetIterator.NO_MORE_DOCS));
       this.norms = norms;
 
       // In the constructor, docId is -1, so using 0 as default lookup
@@ -142,7 +152,7 @@ public class FieldLengthFeature extends Feature {
 
     @Override
     public float score() throws IOException {
-      final long l = norms.longValue();
+      final long l = norms.get(itr.docID());
       final float numTerms = decodeNorm(l);
       return numTerms;
     }
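For the FieldLengthFeatureScorer changes above: on master, NumericDocValues is itself a DocIdSetIterator (advanceExact()/longValue()), whereas on branch_6x it is a random-access API (get(docID)). That is why the 6.x scorer passes DocIdSetIterator.all(...) to its super constructor and looks the norm up by the current docID. A minimal illustrative sketch under that assumption (NormLookupSketch is an invented name, not part of the patch):

import java.io.IOException;

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;

// Hypothetical example class, not part of the patch.
public class NormLookupSketch {

  public static void printNorms(LeafReader reader, String field) throws IOException {
    final NumericDocValues norms = reader.getNormValues(field);
    if (norms == null) {
      return; // the field has no norms
    }
    final DocIdSetIterator itr = DocIdSetIterator.all(reader.maxDoc());
    for (int docID = itr.nextDoc();
        docID != DocIdSetIterator.NO_MORE_DOCS;
        docID = itr.nextDoc()) {
      // 6.x API: positional lookup by docID; on master this would instead be
      // norms.advanceExact(docID) followed by norms.longValue().
      final long norm = norms.get(docID);
      System.out.println(docID + " -> " + norm);
    }
  }
}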
@@ -89,6 +89,16 @@ public class FieldValueFeature extends Feature {
       super(FieldValueFeature.this, searcher, request, originalQuery, efi);
     }
 
+    @Override
+    public float getValueForNormalization() throws IOException {
+      return 1f;
+    }
+
+    @Override
+    public void normalize(float norm, float boost) {
+      // no op
+    }
+
     @Override
     public FeatureScorer scorer(LeafReaderContext context) throws IOException {
       return new FieldValueFeatureScorer(this, context,
@@ -83,6 +83,16 @@ public class OriginalScoreFeature extends Feature {
       w.extractTerms(terms);
     }
 
+    @Override
+    public float getValueForNormalization() throws IOException {
+      return w.getValueForNormalization();
+    }
+
+    @Override
+    public void normalize(float norm, float boost) {
+      w.normalize(norm, boost);
+    }
+
     @Override
     public FeatureScorer scorer(LeafReaderContext context) throws IOException {
@@ -212,6 +212,22 @@ public class SolrFeature extends Feature {
       }
     }
 
+    @Override
+    public float getValueForNormalization() throws IOException {
+      if (solrQueryWeight != null) {
+        return solrQueryWeight.getValueForNormalization();
+      } else {
+        return 0f;
+      }
+    }
+
+    @Override
+    public void normalize(float norm, float boost) {
+      if (solrQueryWeight != null) {
+        solrQueryWeight.normalize(norm, boost);
+      }
+    }
+
     @Override
     public FeatureScorer scorer(LeafReaderContext context) throws IOException {
       Scorer solrScorer = null;
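Unlike the 1f/no-op features, OriginalScoreFeature and SolrFeature wrap another Weight, so they forward normalization to the wrapped weight (SolrFeature additionally guards against a null solrQueryWeight). A minimal sketch of that delegation pattern (NormalizationDelegate is an invented helper, not part of the patch):

import java.io.IOException;

import org.apache.lucene.search.Weight;

// Hypothetical helper, not part of the patch.
public final class NormalizationDelegate {

  private final Weight inner; // may be null when there is no wrapped query

  public NormalizationDelegate(Weight inner) {
    this.inner = inner;
  }

  public float getValueForNormalization() throws IOException {
    // Forward to the wrapped weight so it contributes to the query norm;
    // 0f means "no contribution" when nothing is wrapped.
    return inner != null ? inner.getValueForNormalization() : 0f;
  }

  public void normalize(float norm, float boost) {
    if (inner != null) {
      inner.normalize(norm, boost);
    }
  }
}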
@@ -129,6 +129,16 @@ public class ValueFeature extends Feature {
       }
     }
 
+    @Override
+    public float getValueForNormalization() throws IOException {
+      return 1f;
+    }
+
+    @Override
+    public void normalize(float norm, float boost) {
+      // no op
+    }
+
     @Override
     public FeatureScorer scorer(LeafReaderContext context) throws IOException {
       if(featureValue!=null) {
@@ -199,7 +199,7 @@ public class LTRFeatureLoggerTransformerFactory extends TransformerFactory {
       featureLogger = scoringQuery.getFeatureLogger();
 
       try {
-        modelWeight = scoringQuery.createWeight(searcher, true, 1f);
+        modelWeight = scoringQuery.createWeight(searcher, true);
       } catch (final IOException e) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
       }
@@ -273,7 +273,7 @@ public class TestLTRReRankingPipeline extends LuceneTestCase {
     MockModel ltrScoringModel = new MockModel("test",
         features, norms, "test", allFeatures, null);
     LTRScoringQuery query = new LTRScoringQuery(ltrScoringModel);
-    LTRScoringQuery.ModelWeight wgt = query.createWeight(null, true, 1f);
+    LTRScoringQuery.ModelWeight wgt = query.createWeight(null, true);
     LTRScoringQuery.ModelWeight.ModelScorer modelScr = wgt.scorer(null);
     modelScr.getDocInfo().setOriginalDocScore(new Float(1f));
     for (final Scorer.ChildScorer feat : modelScr.getChildren()) {
@@ -289,7 +289,7 @@ public class TestLTRReRankingPipeline extends LuceneTestCase {
     ltrScoringModel = new MockModel("test", features, norms,
         "test", allFeatures, null);
     query = new LTRScoringQuery(ltrScoringModel);
-    wgt = query.createWeight(null, true, 1f);
+    wgt = query.createWeight(null, true);
     modelScr = wgt.scorer(null);
     modelScr.getDocInfo().setOriginalDocScore(new Float(1f));
     for (final Scorer.ChildScorer feat : modelScr.getChildren()) {