LUCENE-2854: remove SimilarityDelegator; force cutover of Similarity.lengthNorm -> Similarity.computeNorm

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1056771 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2011-01-08 19:20:05 +00:00
parent a518f57756
commit 4f7eba849a
27 changed files with 191 additions and 275 deletions

View File

@@ -38,6 +38,7 @@ import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Fieldable;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermVectorOffsetInfo;
@@ -235,9 +236,10 @@ public class InstantiatedIndexWriter implements Closeable {
       termsInDocument += eFieldTermDocInfoFactoriesByTermText.getValue().size();
       if (eFieldTermDocInfoFactoriesByTermText.getKey().indexed && !eFieldTermDocInfoFactoriesByTermText.getKey().omitNorms) {
-        float norm = eFieldTermDocInfoFactoriesByTermText.getKey().boost;
-        norm *= document.getDocument().getBoost();
-        norm *= similarity.lengthNorm(eFieldTermDocInfoFactoriesByTermText.getKey().fieldName, eFieldTermDocInfoFactoriesByTermText.getKey().fieldLength);
+        final FieldInvertState invertState = new FieldInvertState();
+        invertState.setBoost(eFieldTermDocInfoFactoriesByTermText.getKey().boost * document.getDocument().getBoost());
+        invertState.setLength(eFieldTermDocInfoFactoriesByTermText.getKey().fieldLength);
+        final float norm = similarity.computeNorm(eFieldTermDocInfoFactoriesByTermText.getKey().fieldName, invertState);
         normsByFieldNameAndDocumentNumber.get(eFieldTermDocInfoFactoriesByTermText.getKey().fieldName)[document.getDocumentNumber()] = similarity.encodeNormValue(norm);
       } else {
         System.currentTimeMillis();

View File

@@ -33,7 +33,7 @@ import org.apache.lucene.util.ReaderUtil;
 /**
  * Given a directory and a list of fields, updates the fieldNorms in place for every document.
  *
- * If Similarity class is specified, uses its lengthNorm method to set norms.
+ * If Similarity class is specified, uses its computeNorm method to set norms.
  * If -n command line argument is used, removed field norms, as if
  * {@link org.apache.lucene.document.Field.Index}.NO_NORMS was used.
  *
@@ -119,6 +119,7 @@ public class FieldNormModifier {
     final List<IndexReader> subReaders = new ArrayList<IndexReader>();
     ReaderUtil.gatherSubReaders(subReaders, reader);
+    final FieldInvertState invertState = new FieldInvertState();
     for(IndexReader subReader : subReaders) {
       final Bits delDocs = subReader.getDeletedDocs();
@@ -143,9 +144,11 @@ public class FieldNormModifier {
         }
       }
+      invertState.setBoost(1.0f);
       for (int d = 0; d < termCounts.length; d++) {
         if (delDocs == null || !delDocs.get(d)) {
-          subReader.setNorm(d, fieldName, sim.encodeNormValue(sim.lengthNorm(fieldName, termCounts[d])));
+          invertState.setLength(termCounts[d]);
+          subReader.setNorm(d, fieldName, sim.encodeNormValue(sim.computeNorm(fieldName, invertState)));
         }
       }
     }

View File

@@ -146,7 +146,7 @@ public class SweetSpotSimilarity extends DefaultSimilarity {
     else
       numTokens = state.getLength();
-    return state.getBoost() * lengthNorm(fieldName, numTokens);
+    return state.getBoost() * computeLengthNorm(fieldName, numTokens);
   }
   /**
@@ -167,8 +167,7 @@ public class SweetSpotSimilarity extends DefaultSimilarity {
    *
    * @see #setLengthNormFactors
    */
-  @Override
-  public float lengthNorm(String fieldName, int numTerms) {
+  public float computeLengthNorm(String fieldName, int numTerms) {
     int l = ln_min;
     int h = ln_max;
     float s = ln_steep;
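
Since lengthNorm is gone from Similarity, the sweet-spot plateau logic now lives under the new name computeLengthNorm, and computeNorm above multiplies it by the boost carried in FieldInvertState. A rough usage sketch under assumed settings (the field name, token counts, and helper class are illustrative only, not part of this commit):

import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.misc.SweetSpotSimilarity;

class SweetSpotNormSketch {
  static float normFor(int numTokens) {
    SweetSpotSimilarity ss = new SweetSpotSimilarity();
    ss.setLengthNormFactors(3, 10, 0.5f);   // plateau of 1.0 for fields of 3..10 tokens
    FieldInvertState state = new FieldInvertState();
    state.setBoost(1.0f);
    state.setLength(numTokens);
    // computeNorm(field, state) == state.getBoost() * computeLengthNorm(field, numTokens)
    return ss.computeNorm("body", state);
  }
}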

View File

@@ -43,8 +43,8 @@ public class TestFieldNormModifier extends LuceneTestCase {
   /** inverts the normal notion of lengthNorm */
   public static Similarity s = new DefaultSimilarity() {
     @Override
-    public float lengthNorm(String fieldName, int numTokens) {
-      return numTokens;
+    public float computeNorm(String fieldName, FieldInvertState state) {
+      return state.getBoost() * (discountOverlaps ? state.getLength() - state.getNumOverlap() : state.getLength());
     }
   };

View File

@@ -21,13 +21,14 @@ package org.apache.lucene.misc;
 import org.apache.lucene.search.DefaultSimilarity;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.index.FieldInvertState;
 /**
  * Test of the SweetSpotSimilarity
  */
 public class SweetSpotSimilarityTest extends LuceneTestCase {
-  public void testSweetSpotLengthNorm() {
+  public void testSweetSpotComputeNorm() {
     SweetSpotSimilarity ss = new SweetSpotSimilarity();
     ss.setLengthNormFactors(1,1,0.5f);
@@ -37,10 +38,13 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
     // base case, should degrade
+    final FieldInvertState invertState = new FieldInvertState();
+    invertState.setBoost(1.0f);
     for (int i = 1; i < 1000; i++) {
+      invertState.setLength(i);
       assertEquals("base case: i="+i,
-                   d.lengthNorm("foo",i), s.lengthNorm("foo",i),
+                   d.computeNorm("foo", invertState),
+                   s.computeNorm("foo", invertState),
                    0.0f);
     }
@@ -49,14 +53,21 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
     ss.setLengthNormFactors(3,10,0.5f);
     for (int i = 3; i <=10; i++) {
+      invertState.setLength(i);
       assertEquals("3,10: spot i="+i,
-                   1.0f, s.lengthNorm("foo",i),
+                   1.0f,
+                   s.computeNorm("foo", invertState),
                    0.0f);
     }
     for (int i = 10; i < 1000; i++) {
+      invertState.setLength(i-9);
+      final float normD = d.computeNorm("foo", invertState);
+      invertState.setLength(i);
+      final float normS = s.computeNorm("foo", invertState);
       assertEquals("3,10: 10<x : i="+i,
-                   d.lengthNorm("foo",i-9), s.lengthNorm("foo",i),
+                   normD,
+                   normS,
                    0.0f);
     }
@@ -68,33 +79,54 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
     for (int i = 3; i <=10; i++) {
+      invertState.setLength(i);
       assertEquals("f: 3,10: spot i="+i,
-                   1.0f, s.lengthNorm("foo",i),
+                   1.0f,
+                   s.computeNorm("foo", invertState),
                    0.0f);
     }
     for (int i = 10; i < 1000; i++) {
+      invertState.setLength(i-9);
+      final float normD = d.computeNorm("foo", invertState);
+      invertState.setLength(i);
+      final float normS = s.computeNorm("foo", invertState);
       assertEquals("f: 3,10: 10<x : i="+i,
-                   d.lengthNorm("foo",i-9), s.lengthNorm("foo",i),
+                   normD,
+                   normS,
                    0.0f);
     }
     for (int i = 8; i <=13; i++) {
+      invertState.setLength(i);
       assertEquals("f: 8,13: spot i="+i,
-                   1.0f, s.lengthNorm("bar",i),
+                   1.0f,
+                   s.computeNorm("bar", invertState),
                    0.0f);
     }
     for (int i = 6; i <=9; i++) {
+      invertState.setLength(i);
       assertEquals("f: 6,9: spot i="+i,
-                   1.0f, s.lengthNorm("yak",i),
+                   1.0f,
+                   s.computeNorm("yak", invertState),
                    0.0f);
     }
     for (int i = 13; i < 1000; i++) {
+      invertState.setLength(i-12);
+      final float normD = d.computeNorm("foo", invertState);
+      invertState.setLength(i);
+      final float normS = s.computeNorm("bar", invertState);
       assertEquals("f: 8,13: 13<x : i="+i,
-                   d.lengthNorm("foo",i-12), s.lengthNorm("bar",i),
+                   normD,
+                   normS,
                    0.0f);
     }
     for (int i = 9; i < 1000; i++) {
+      invertState.setLength(i-8);
+      final float normD = d.computeNorm("foo", invertState);
+      invertState.setLength(i);
+      final float normS = s.computeNorm("yak", invertState);
       assertEquals("f: 6,9: 9<x : i="+i,
-                   d.lengthNorm("foo",i-8), s.lengthNorm("yak",i),
+                   normD,
+                   normS,
                    0.0f);
     }
@@ -105,9 +137,12 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
     ss.setLengthNormFactors("b",5,8,0.1f, false);
     for (int i = 9; i < 1000; i++) {
-      assertTrue("s: i="+i+" : a="+ss.lengthNorm("a",i)+
-                 " < b="+ss.lengthNorm("b",i),
-                 ss.lengthNorm("a",i) < s.lengthNorm("b",i));
+      invertState.setLength(i);
+      final float normSS = ss.computeNorm("a", invertState);
+      final float normS = s.computeNorm("b", invertState);
+      assertTrue("s: i="+i+" : a="+normSS+
+                 " < b="+normS,
+                 normSS < normS);
     }
   }

View File

@@ -22,6 +22,7 @@ import java.io.IOException;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.FieldNormModifier;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
@@ -46,11 +47,11 @@ public class TestLengthNormModifier extends LuceneTestCase {
   /** inverts the normal notion of lengthNorm */
   public static Similarity s = new DefaultSimilarity() {
     @Override
-    public float lengthNorm(String fieldName, int numTokens) {
-      return numTokens;
+    public float computeNorm(String fieldName, FieldInvertState state) {
+      return state.getBoost() * (discountOverlaps ? state.getLength() - state.getNumOverlap() : state.getLength());
     }
   };
   @Override
   public void setUp() throws Exception {
@@ -162,11 +163,11 @@ public class TestLengthNormModifier extends LuceneTestCase {
     // override the norms to be inverted
     Similarity s = new DefaultSimilarity() {
       @Override
-      public float lengthNorm(String fieldName, int numTokens) {
-        return numTokens;
+      public float computeNorm(String fieldName, FieldInvertState state) {
+        return state.getBoost() * (discountOverlaps ? state.getLength() - state.getNumOverlap() : state.getLength());
       }
     };
     FieldNormModifier fnm = new FieldNormModifier(store, s);
     fnm.reSetNorms("field");

View File

@@ -292,7 +292,7 @@ public class FuzzyLikeThisQuery extends Query
         {
           //optimize where only one selected variant
           ScoreTerm st= variants.get(0);
-          TermQuery tq = new FuzzyTermQuery(st.term,ignoreTF);
+          Query tq = ignoreTF ? new ConstantScoreQuery(new TermQuery(st.term)) : new TermQuery(st.term, 1);
           tq.setBoost(st.score); // set the boost to a mix of IDF and score
           bq.add(tq, BooleanClause.Occur.SHOULD);
         }
@@ -303,7 +303,8 @@ public class FuzzyLikeThisQuery extends Query
               .hasNext();)
           {
             ScoreTerm st = iterator2.next();
-            TermQuery tq = new FuzzyTermQuery(st.term,ignoreTF);      // found a match
+            // found a match
+            Query tq = ignoreTF ? new ConstantScoreQuery(new TermQuery(st.term)) : new TermQuery(st.term, 1);
             tq.setBoost(st.score); // set the boost using the ScoreTerm's score
             termVariants.add(tq, BooleanClause.Occur.SHOULD);          // add to query
           }
@@ -350,43 +351,6 @@ public class FuzzyLikeThisQuery extends Query
     }
-    //overrides basic TermQuery to negate effects of IDF (idf is factored into boost of containing BooleanQuery)
-    private static class FuzzyTermQuery extends TermQuery
-    {
-        boolean ignoreTF;
-        public FuzzyTermQuery(Term t, boolean ignoreTF)
-        {
-            super(t);
-            this.ignoreTF=ignoreTF;
-        }
-        @Override
-        public Similarity getSimilarity(IndexSearcher searcher)
-        {
-            Similarity result = super.getSimilarity(searcher);
-            result = new SimilarityDelegator(result) {
-                @Override
-                public float tf(float freq)
-                {
-                    if(ignoreTF)
-                    {
-                        return 1; //ignore tf
-                    }
-                    return super.tf(freq);
-                }
-                @Override
-                public float idf(int docFreq, int numDocs)
-                {
-                    //IDF is already factored into individual term boosts
-                    return 1;
-                }
-            };
-            return result;
-        }
-    }
     /* (non-Javadoc)
      * @see org.apache.lucene.search.Query#toString(java.lang.String)
      */

View File

@@ -73,6 +73,10 @@ public final class FieldInvertState {
     return length;
   }
+  public void setLength(int length) {
+    this.length = length;
+  }
   /**
    * Get the number of terms with <code>positionIncrement == 0</code>.
    * @return the numOverlap
@@ -81,6 +85,10 @@ public final class FieldInvertState {
     return numOverlap;
   }
+  public void setNumOverlap(int numOverlap) {
+    this.numOverlap = numOverlap;
+  }
   /**
    * Get end offset of the last processed term.
    * @return the offset
@@ -99,6 +107,10 @@ public final class FieldInvertState {
     return boost;
   }
+  public void setBoost(float boost) {
+    this.boost = boost;
+  }
   public AttributeSource getAttributeSource() {
     return attributeSource;
   }
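
These setters exist so that code outside the indexing chain, like FieldNormModifier above and the Solr function-query test at the end of this commit, can fabricate a FieldInvertState and hand it to computeNorm. A minimal sketch of that pattern, assuming DefaultSimilarity and made-up field values:

import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;

class RecomputeNormSketch {
  // Recompute the stored norm byte for a field that held numTokens tokens.
  static byte recompute(String field, int numTokens) {
    Similarity sim = new DefaultSimilarity();
    FieldInvertState state = new FieldInvertState();
    state.setBoost(1.0f);        // no document or field boost
    state.setLength(numTokens);  // token count the indexer would have accumulated
    return sim.encodeNormValue(sim.computeNorm(field, state));
  }
}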

View File

@@ -63,10 +63,12 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
   }
   private ArrayList<BooleanClause> clauses = new ArrayList<BooleanClause>();
-  private boolean disableCoord;
+  private final boolean disableCoord;
   /** Constructs an empty boolean query. */
-  public BooleanQuery() {}
+  public BooleanQuery() {
+    disableCoord = false;
+  }
   /** Constructs an empty boolean query.
    *
@@ -87,22 +89,6 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
    */
   public boolean isCoordDisabled() { return disableCoord; }
-  // Implement coord disabling.
-  // Inherit javadoc.
-  @Override
-  public Similarity getSimilarity(IndexSearcher searcher) {
-    Similarity result = super.getSimilarity(searcher);
-    if (disableCoord) {                           // disable coord as requested
-      result = new SimilarityDelegator(result) {
-          @Override
-          public float coord(int overlap, int maxOverlap) {
-            return 1.0f;
-          }
-        };
-    }
-    return result;
-  }
   /**
    * Specifies a minimum number of the optional BooleanClauses
    * which must be satisfied.
@@ -179,10 +165,12 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
     protected Similarity similarity;
     protected ArrayList<Weight> weights;
     protected int maxCoord;  // num optional + num required
+    private final boolean disableCoord;
-    public BooleanWeight(IndexSearcher searcher)
+    public BooleanWeight(IndexSearcher searcher, boolean disableCoord)
       throws IOException {
       this.similarity = getSimilarity(searcher);
+      this.disableCoord = disableCoord;
       weights = new ArrayList<Weight>(clauses.size());
       for (int i = 0 ; i < clauses.size(); i++) {
         BooleanClause c = clauses.get(i);
@@ -285,10 +273,10 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
       sumExpl.setMatch(0 < coord ? Boolean.TRUE : Boolean.FALSE);
       sumExpl.setValue(sum);
-      float coordFactor = similarity.coord(coord, maxCoord);
-      if (coordFactor == 1.0f)                      // coord is no-op
+      final float coordFactor = disableCoord ? 1.0f : similarity.coord(coord, maxCoord);
+      if (coordFactor == 1.0f) {
         return sumExpl;                             // eliminate wrapper
-      else {
+      } else {
         ComplexExplanation result = new ComplexExplanation(sumExpl.isMatch(),
                                                            sum*coordFactor,
                                                            "product of:");
@@ -324,7 +312,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
       // Check if we can return a BooleanScorer
       if (!scoreDocsInOrder && topScorer && required.size() == 0 && prohibited.size() < 32) {
-        return new BooleanScorer(this, similarity, minNrShouldMatch, optional, prohibited, maxCoord);
+        return new BooleanScorer(this, disableCoord, similarity, minNrShouldMatch, optional, prohibited, maxCoord);
       }
       if (required.size() == 0 && optional.size() == 0) {
@@ -338,7 +326,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
       }
       // Return a BooleanScorer2
-      return new BooleanScorer2(this, similarity, minNrShouldMatch, required, prohibited, optional, maxCoord);
+      return new BooleanScorer2(this, disableCoord, similarity, minNrShouldMatch, required, prohibited, optional, maxCoord);
     }
     @Override
@@ -364,7 +352,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
   @Override
   public Weight createWeight(IndexSearcher searcher) throws IOException {
-    return new BooleanWeight(searcher);
+    return new BooleanWeight(searcher, disableCoord);
   }
   @Override
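
Coord disabling used to be implemented by returning a SimilarityDelegator from getSimilarity(); it is now a plain flag threaded from BooleanQuery through BooleanWeight into the scorers. A hedged sketch of the caller-visible side, with invented field and term values:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

class CoordOffSketch {
  static BooleanQuery coordOffQuery() {
    BooleanQuery bq = new BooleanQuery(true);  // true = disable the coord factor
    bq.add(new TermQuery(new Term("body", "lucene")), BooleanClause.Occur.SHOULD);
    bq.add(new TermQuery(new Term("body", "norms")), BooleanClause.Occur.SHOULD);
    // createWeight() now passes disableCoord to BooleanWeight, and the scorers
    // substitute 1.0f for similarity.coord(...), so no delegator is involved.
    return bq;
  }
}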

View File

@@ -197,9 +197,9 @@ final class BooleanScorer extends Scorer {
   private Bucket current;
   private int doc = -1;
-  BooleanScorer(Weight weight, Similarity similarity, int minNrShouldMatch,
+  BooleanScorer(Weight weight, boolean disableCoord, Similarity similarity, int minNrShouldMatch,
       List<Scorer> optionalScorers, List<Scorer> prohibitedScorers, int maxCoord) throws IOException {
-    super(similarity, weight);
+    super(null, weight);   // Similarity not used
     this.minNrShouldMatch = minNrShouldMatch;
     if (optionalScorers != null && optionalScorers.size() > 0) {
@@ -222,9 +222,8 @@ final class BooleanScorer extends Scorer {
     }
     coordFactors = new float[optionalScorers.size() + 1];
-    Similarity sim = getSimilarity();
     for (int i = 0; i < coordFactors.length; i++) {
-      coordFactors[i] = sim.coord(i, maxCoord);
+      coordFactors[i] = disableCoord ? 1.0f : similarity.coord(i, maxCoord);
     }
   }

View File

@@ -42,14 +42,12 @@ class BooleanScorer2 extends Scorer {
     int maxCoord = 0; // to be increased for each non prohibited scorer
     int nrMatchers; // to be increased by score() of match counting scorers.
-    void init() { // use after all scorers have been added.
+    void init(Similarity sim, boolean disableCoord) { // use after all scorers have been added.
       coordFactors = new float[optionalScorers.size() + requiredScorers.size() + 1];
-      Similarity sim = getSimilarity();
       for (int i = 0; i < coordFactors.length; i++) {
-        coordFactors[i] = sim.coord(i, maxCoord);
+        coordFactors[i] = disableCoord ? 1.0f : sim.coord(i, maxCoord);
       }
     }
   }
   private final Coordinator coordinator;
@@ -82,9 +80,9 @@ class BooleanScorer2 extends Scorer {
    * @param optional
    *          the list of optional scorers.
    */
-  public BooleanScorer2(Weight weight, Similarity similarity, int minNrShouldMatch,
+  public BooleanScorer2(Weight weight, boolean disableCoord, Similarity similarity, int minNrShouldMatch,
       List<Scorer> required, List<Scorer> prohibited, List<Scorer> optional, int maxCoord) throws IOException {
-    super(similarity, weight);
+    super(null, weight);   // Similarity not used
     if (minNrShouldMatch < 0) {
       throw new IllegalArgumentException("Minimum number of optional scorers should not be negative");
     }
@@ -96,8 +94,8 @@ class BooleanScorer2 extends Scorer {
     requiredScorers = required;
     prohibitedScorers = prohibited;
-    coordinator.init();
-    countingSumScorer = makeCountingSumScorer();
+    coordinator.init(similarity, disableCoord);
+    countingSumScorer = makeCountingSumScorer(disableCoord, similarity);
   }
   /** Count a scorer as a single match. */
@@ -109,7 +107,7 @@ class BooleanScorer2 extends Scorer {
     private float lastDocScore = Float.NaN;
     SingleMatchScorer(Scorer scorer) {
-      super(scorer.getSimilarity());
+      super(null); // No similarity used.
       this.scorer = scorer;
     }
@@ -164,12 +162,12 @@ class BooleanScorer2 extends Scorer {
     };
   }
-  private static final Similarity defaultSimilarity = Similarity.getDefault();
-
-  private Scorer countingConjunctionSumScorer(List<Scorer> requiredScorers) throws IOException {
+  private Scorer countingConjunctionSumScorer(boolean disableCoord,
+                                              Similarity similarity,
+                                              List<Scorer> requiredScorers) throws IOException {
     // each scorer from the list counted as a single matcher
     final int requiredNrMatchers = requiredScorers.size();
-    return new ConjunctionScorer(defaultSimilarity, requiredScorers) {
+    return new ConjunctionScorer(disableCoord ? 1.0f : similarity.coord(requiredScorers.size(), requiredScorers.size()), requiredScorers) {
       private int lastScoredDoc = -1;
       // Save the score of lastScoredDoc, so that we don't compute it more than
      // once in score().
@@ -192,8 +190,10 @@ class BooleanScorer2 extends Scorer {
     };
   }
-  private Scorer dualConjunctionSumScorer(Scorer req1, Scorer req2) throws IOException { // non counting.
-    return new ConjunctionScorer(defaultSimilarity, req1, req2);
+  private Scorer dualConjunctionSumScorer(boolean disableCoord,
+                                          Similarity similarity,
+                                          Scorer req1, Scorer req2) throws IOException { // non counting.
+    return new ConjunctionScorer(disableCoord ? 1.0f : similarity.coord(2, 2), req1, req2);
     // All scorers match, so defaultSimilarity always has 1 as
     // the coordination factor.
     // Therefore the sum of the scores of two scorers
@@ -203,13 +203,14 @@ class BooleanScorer2 extends Scorer {
   /** Returns the scorer to be used for match counting and score summing.
    * Uses requiredScorers, optionalScorers and prohibitedScorers.
    */
-  private Scorer makeCountingSumScorer() throws IOException { // each scorer counted as a single matcher
+  private Scorer makeCountingSumScorer(boolean disableCoord,
+                                       Similarity similarity) throws IOException { // each scorer counted as a single matcher
     return (requiredScorers.size() == 0)
-      ? makeCountingSumScorerNoReq()
-      : makeCountingSumScorerSomeReq();
+      ? makeCountingSumScorerNoReq(disableCoord, similarity)
+      : makeCountingSumScorerSomeReq(disableCoord, similarity);
   }
-  private Scorer makeCountingSumScorerNoReq() throws IOException { // No required scorers
+  private Scorer makeCountingSumScorerNoReq(boolean disableCoord, Similarity similarity) throws IOException { // No required scorers
     // minNrShouldMatch optional scorers are required, but at least 1
     int nrOptRequired = (minNrShouldMatch < 1) ? 1 : minNrShouldMatch;
     Scorer requiredCountingSumScorer;
@@ -217,24 +218,27 @@ class BooleanScorer2 extends Scorer {
       requiredCountingSumScorer = countingDisjunctionSumScorer(optionalScorers, nrOptRequired);
     else if (optionalScorers.size() == 1)
       requiredCountingSumScorer = new SingleMatchScorer(optionalScorers.get(0));
-    else
-      requiredCountingSumScorer = countingConjunctionSumScorer(optionalScorers);
+    else {
+      requiredCountingSumScorer = countingConjunctionSumScorer(disableCoord, similarity, optionalScorers);
+    }
     return addProhibitedScorers(requiredCountingSumScorer);
   }
-  private Scorer makeCountingSumScorerSomeReq() throws IOException { // At least one required scorer.
+  private Scorer makeCountingSumScorerSomeReq(boolean disableCoord, Similarity similarity) throws IOException { // At least one required scorer.
     if (optionalScorers.size() == minNrShouldMatch) { // all optional scorers also required.
       ArrayList<Scorer> allReq = new ArrayList<Scorer>(requiredScorers);
       allReq.addAll(optionalScorers);
-      return addProhibitedScorers(countingConjunctionSumScorer(allReq));
+      return addProhibitedScorers(countingConjunctionSumScorer(disableCoord, similarity, allReq));
     } else { // optionalScorers.size() > minNrShouldMatch, and at least one required scorer
       Scorer requiredCountingSumScorer =
             requiredScorers.size() == 1
             ? new SingleMatchScorer(requiredScorers.get(0))
-            : countingConjunctionSumScorer(requiredScorers);
+            : countingConjunctionSumScorer(disableCoord, similarity, requiredScorers);
       if (minNrShouldMatch > 0) { // use a required disjunction scorer over the optional scorers
         return addProhibitedScorers(
                       dualConjunctionSumScorer( // non counting
+                              disableCoord,
+                              similarity,
                               requiredCountingSumScorer,
                               countingDisjunctionSumScorer(
                                       optionalScorers,

View File

@@ -29,14 +29,14 @@ class ConjunctionScorer extends Scorer {
   private final float coord;
   private int lastDoc = -1;
-  public ConjunctionScorer(Similarity similarity, Collection<Scorer> scorers) throws IOException {
-    this(similarity, scorers.toArray(new Scorer[scorers.size()]));
+  public ConjunctionScorer(float coord, Collection<Scorer> scorers) throws IOException {
+    this(coord, scorers.toArray(new Scorer[scorers.size()]));
   }
-  public ConjunctionScorer(Similarity similarity, Scorer... scorers) throws IOException {
-    super(similarity);
+  public ConjunctionScorer(float coord, Scorer... scorers) throws IOException {
+    super(null);
     this.scorers = scorers;
-    coord = similarity.coord(scorers.length, scorers.length);
+    this.coord = coord;
     for (int i = 0; i < scorers.length; i++) {
       if (scorers[i].nextDoc() == NO_MORE_DOCS) {

View File

@@ -37,13 +37,7 @@ public class DefaultSimilarity extends Similarity {
       numTerms = state.getLength() - state.getNumOverlap();
     else
       numTerms = state.getLength();
-    return (state.getBoost() * lengthNorm(field, numTerms));
-  }
-
-  /** Implemented as <code>1/sqrt(numTerms)</code>. */
-  @Override
-  public float lengthNorm(String fieldName, int numTerms) {
-    return (float)(1.0 / Math.sqrt(numTerms));
+    return state.getBoost() * ((float) (1.0 / Math.sqrt(numTerms)));
   }
   /** Implemented as <code>1/sqrt(sumOfSquaredWeights)</code>. */
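
The inlined expression is the same arithmetic as the old two-step boost * lengthNorm call. A tiny worked check under assumed inputs (field name, counts, and helper class invented), mirroring the Solr test at the end of this commit where a four-term field yields 1/sqrt(4) = 0.5:

import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.search.DefaultSimilarity;

class DefaultNormSketch {
  static void check() {
    FieldInvertState state = new FieldInvertState();
    state.setBoost(1.0f);
    state.setLength(4);   // four terms, no overlaps
    float norm = new DefaultSimilarity().computeNorm("a_t", state);
    assert norm == 0.5f;  // 1.0f * (float) (1.0 / Math.sqrt(4))
  }
}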

View File

@@ -462,12 +462,14 @@ import org.apache.lucene.util.SmallFloat;
  *    {@link org.apache.lucene.document.Fieldable#setBoost(float) field.setBoost()}
  *    before adding the field to a document.
  *    </li>
- *    <li>{@link #lengthNorm(String, int) <b>lengthNorm</b>(field)} - computed
+ *    <li><b>lengthNorm</b> - computed
  *    when the document is added to the index in accordance with the number of tokens
  *    of this field in the document, so that shorter fields contribute more to the score.
  *    LengthNorm is computed by the Similarity class in effect at indexing.
  *    </li>
  *    </ul>
+ *    The {@link #computeNorm} method is responsible for
+ *    combining all of these factors into a single float.
  *
  *    <p>
  *    When a document is added to the index, all the above factors are multiplied.
@@ -480,7 +482,7 @@ import org.apache.lucene.util.SmallFloat;
  *        norm(t,d) &nbsp; = &nbsp;
  *        {@link org.apache.lucene.document.Document#getBoost() doc.getBoost()}
  *        &nbsp;&middot;&nbsp;
- *        {@link #lengthNorm(String, int) lengthNorm(field)}
+ *        lengthNorm
  *        &nbsp;&middot;&nbsp;
  *      </td>
  *      <td valign="bottom" align="center" rowspan="1">
@@ -570,12 +572,23 @@ public abstract class Similarity implements Serializable {
   }
   /**
-   * Compute the normalization value for a field, given the accumulated
+   * Computes the normalization value for a field, given the accumulated
    * state of term processing for this field (see {@link FieldInvertState}).
    *
   * <p>Implementations should calculate a float value based on the field
   * state and then return that value.
   *
+   * <p>Matches in longer fields are less precise, so implementations of this
+   * method usually return smaller values when <code>state.getLength()</code> is large,
+   * and larger values when <code>state.getLength()</code> is small.
+   *
+   * <p>Note that the return values are computed under
+   * {@link org.apache.lucene.index.IndexWriter#addDocument(org.apache.lucene.document.Document)}
+   * and then stored using
+   * {@link #encodeNormValue(float)}.
+   * Thus they have limited precision, and documents
+   * must be re-indexed if this method is altered.
+   *
   * <p>For backward compatibility this method by default calls
   * {@link #lengthNorm(String, int)} passing
   * {@link FieldInvertState#getLength()} as the second argument, and
   * then multiplying by {@link FieldInvertState#getBoost()}.
@@ -587,9 +600,7 @@ public abstract class Similarity implements Serializable {
   * @param state current processing state for this field
   * @return the calculated float norm
   */
-  public float computeNorm(String field, FieldInvertState state) {
-    return (state.getBoost() * lengthNorm(field, state.getLength()));
-  }
+  public abstract float computeNorm(String field, FieldInvertState state);
   /** Computes the normalization value for a field given the total number of
    * terms contained in a field.  These values, together with field boosts, are
@@ -613,8 +624,13 @@ public abstract class Similarity implements Serializable {
   * @return a normalization factor for hits on this field of this document
   *
   * @see org.apache.lucene.document.Field#setBoost(float)
+  *
+  * @deprecated Please override computeNorm instead
   */
-  public abstract float lengthNorm(String fieldName, int numTokens);
+  @Deprecated
+  public final float lengthNorm(String fieldName, int numTokens) {
+    throw new UnsupportedOperationException("please use computeNorm instead");
+  }
   /** Computes the normalization value for a query given the sum of the squared
    * weights of each of the query terms.  This value is multiplied into the
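
With computeNorm abstract and lengthNorm now final and throwing UnsupportedOperationException, every custom Similarity has to override computeNorm directly. A minimal sketch of the cutover, modeled on the test Similarities elsewhere in this commit (the class name is illustrative only):

import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.search.DefaultSimilarity;

class NoLengthNormSimilarity extends DefaultSimilarity {
  // Old idiom: lengthNorm(field, numTokens) { return 1.0f; }
  // New idiom: keep only the boost accumulated in FieldInvertState.
  @Override
  public float computeNorm(String field, FieldInvertState state) {
    return state.getBoost();
  }
}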

View File

@@ -1,76 +0,0 @@
-package org.apache.lucene.search;
-
-import org.apache.lucene.index.FieldInvertState;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/** Expert: Delegating scoring implementation.  Useful in {@link
- * Query#getSimilarity(IndexSearcher)} implementations, to override only certain
- * methods of a Searcher's Similarity implementation.. */
-public class SimilarityDelegator extends Similarity {
-
-  private Similarity delegee;
-
-  /** Construct a {@link Similarity} that delegates all methods to another.
-   *
-   * @param delegee the Similarity implementation to delegate to
-   */
-  public SimilarityDelegator(Similarity delegee) {
-    this.delegee = delegee;
-  }
-
-  @Override
-  public float computeNorm(String fieldName, FieldInvertState state) {
-    return delegee.computeNorm(fieldName, state);
-  }
-
-  @Override
-  public float lengthNorm(String fieldName, int numTerms) {
-    return delegee.lengthNorm(fieldName, numTerms);
-  }
-
-  @Override
-  public float queryNorm(float sumOfSquaredWeights) {
-    return delegee.queryNorm(sumOfSquaredWeights);
-  }
-
-  @Override
-  public float tf(float freq) {
-    return delegee.tf(freq);
-  }
-
-  @Override
-  public float sloppyFreq(int distance) {
-    return delegee.sloppyFreq(distance);
-  }
-
-  @Override
-  public float idf(int docFreq, int numDocs) {
-    return delegee.idf(docFreq, numDocs);
-  }
-
-  @Override
-  public float coord(int overlap, int maxOverlap) {
-    return delegee.coord(overlap, maxOverlap);
-  }
-
-  @Override
-  public float scorePayload(int docId, String fieldName, int start, int end, byte [] payload, int offset, int length) {
-    return delegee.scorePayload(docId, fieldName, start, end, payload, offset, length);
-  }
-}

View File

@@ -42,8 +42,9 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
   private class SimilarityOne extends DefaultSimilarity {
     @Override
-    public float lengthNorm(String fieldName, int numTerms) {
-      return 1;
+    public float computeNorm(String fieldName, FieldInvertState state) {
+      // diable length norm
+      return state.getBoost();
     }
   }

View File

@@ -41,8 +41,9 @@ public class TestNorms extends LuceneTestCase {
   private class SimilarityOne extends DefaultSimilarity {
     @Override
-    public float lengthNorm(String fieldName, int numTerms) {
-      return 1;
+    public float computeNorm(String fieldName, FieldInvertState state) {
+      // Disable length norm
+      return state.getBoost();
     }
   }

View File

@@ -35,7 +35,7 @@ import org.apache.lucene.search.Explanation.IDFExplanation;
 public class TestOmitTf extends LuceneTestCase {
   public static class SimpleSimilarity extends Similarity {
-    @Override public float lengthNorm(String field, int numTerms) { return 1.0f; }
+    @Override public float computeNorm(String field, FieldInvertState state) { return state.getBoost(); }
     @Override public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
     @Override public float tf(float freq) { return freq; }
     @Override public float sloppyFreq(int distance) { return 2.0f; }

View File

@@ -22,6 +22,7 @@ import java.io.IOException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReader.ReaderContext;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.util.PriorityQueue;
 /**
@@ -253,7 +254,7 @@ final class JustCompileSearch {
     }
     @Override
-    public float lengthNorm(String fieldName, int numTokens) {
+    public float computeNorm(String fieldName, FieldInvertState state) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }

View File

@@ -83,7 +83,7 @@ public class TestBooleanScorer extends LuceneTestCase
       }
     }};
-    BooleanScorer bs = new BooleanScorer(null, sim, 1, Arrays.asList(scorers), null, scorers.length);
+    BooleanScorer bs = new BooleanScorer(null, false, sim, 1, Arrays.asList(scorers), null, scorers.length);
     assertEquals("should have received 3000", 3000, bs.nextDoc());
     assertEquals("should have received NO_MORE_DOCS", DocIdSetIterator.NO_MORE_DOCS, bs.nextDoc());

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.SlowMultiReaderWrapper;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.IndexReader.ReaderContext;
@@ -60,8 +61,9 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
     }
     @Override
-    public float lengthNorm(String fieldName, int numTerms) {
-      return 1.0f;
+    public float computeNorm(String fieldName, FieldInvertState state) {
+      // Disable length norm
+      return state.getBoost();
     }
     @Override

View File

@@ -21,6 +21,7 @@ import org.apache.lucene.util.LuceneTestCase;
 import java.io.IOException;
 import java.util.Collection;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
@@ -38,7 +39,7 @@ import org.apache.lucene.search.Explanation.IDFExplanation;
 public class TestSimilarity extends LuceneTestCase {
   public static class SimpleSimilarity extends Similarity {
-    @Override public float lengthNorm(String field, int numTerms) { return 1.0f; }
+    @Override public float computeNorm(String field, FieldInvertState state) { return state.getBoost(); }
     @Override public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
     @Override public float tf(float freq) { return freq; }
     @Override public float sloppyFreq(int distance) { return 2.0f; }

View File

@@ -26,6 +26,7 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Payload;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -305,8 +306,8 @@ public class TestPayloadNearQuery extends LuceneTestCase {
     //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
     //Make everything else 1 so we see the effect of the payload
     //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-    @Override public float lengthNorm(String fieldName, int numTerms) {
-      return 1.0f;
+    @Override public float computeNorm(String fieldName, FieldInvertState state) {
+      return state.getBoost();
     }
     @Override public float queryNorm(float sumOfSquaredWeights) {

View File

@@ -34,6 +34,7 @@ import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Payload;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -299,8 +300,8 @@ public class TestPayloadTermQuery extends LuceneTestCase {
     //Make everything else 1 so we see the effect of the payload
     //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
     @Override
-    public float lengthNorm(String fieldName, int numTerms) {
-      return 1;
+    public float computeNorm(String fieldName, FieldInvertState state) {
+      return state.getBoost();
     }
     @Override

View File

@@ -419,7 +419,7 @@ public class TestSpans extends LuceneTestCase {
         public Similarity getSimilarity(IndexSearcher s) {
           return sim;
         }
       };
     Scorer spanScorer = snq.weight(searcher).scorer(new AtomicReaderContext(new SlowMultiReaderWrapper(searcher.getIndexReader())), true, false);

View File

@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.search;
-
-import org.apache.lucene.search.DefaultSimilarity;
-
-import java.util.HashMap;
-
-/**
- */
-
-// don't make it public for now... easier to change later.
-// This class is currently unused.
-class SolrSimilarity extends DefaultSimilarity {
-  private final HashMap<String,Float> lengthNormConfig = new HashMap<String,Float>();
-
-  public float lengthNorm(String fieldName, int numTerms) {
-    // Float f = lengthNormConfig.
-    // if (lengthNormDisabled.)
-    return super.lengthNorm(fieldName, numTerms);
-  }
-}

View File

@@ -17,6 +17,7 @@
 package org.apache.solr.search.function;
+import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.search.DefaultSimilarity;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.Similarity;
@@ -294,8 +295,11 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
             "//float[@name='score']='" + similarity.idf(3,6)  + "'");
     assertQ(req("fl","*,score","q", "{!func}tf(a_t,cow)", "fq","id:6"),
             "//float[@name='score']='" + similarity.tf(5)  + "'");
+    FieldInvertState state = new FieldInvertState();
+    state.setBoost(1.0f);
+    state.setLength(4);
     assertQ(req("fl","*,score","q", "{!func}norm(a_t)", "fq","id:2"),
-            "//float[@name='score']='" + similarity.lengthNorm("a_t",4)  + "'");  // sqrt(4)==2 and is exactly representable when quantized to a byte
+            "//float[@name='score']='" + similarity.computeNorm("a_t",state)  + "'");  // sqrt(4)==2 and is exactly representable when quantized to a byte
     // test that ord and rord are working on a global index basis, not just
     // at the segment level (since Lucene 2.9 has switched to per-segment searching)