LUCENE-3256: Consolidated CustomScoreQuery and BoostedQuery into Queries module

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1141366 13f79535-47bb-0310-9956-ffa450edef68
Christopher John Male 2011-06-30 02:31:22 +00:00
parent a2cb2aa37a
commit 2266c70ce4
10 changed files with 257 additions and 184 deletions
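For orientation, a minimal usage sketch of the consolidated API (not part of the diff): CustomScoreQuery now lives in org.apache.lucene.queries and accepts plain Query scoring clauses (typically a FunctionQuery), while BoostedQuery moves to org.apache.lucene.queries.function. The field names and the (Query, ValueSource) shape of the BoostedQuery constructor are assumptions for illustration.

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CustomScoreQuery;
import org.apache.lucene.queries.function.BoostedQuery;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.IntValuesCreator;

public class ConsolidatedQueriesSketch {
  public static void main(String[] args) {
    Query subQuery = new TermQuery(new Term("text", "first"));

    // A FunctionQuery scoring by an int field, built the same way as in the updated tests below.
    ValueSource source = new IntFieldSource(
        new IntValuesCreator("iii", null, CachedArrayCreator.CACHE_VALUES_AND_BITS));
    FunctionQuery functionQuery = new FunctionQuery(source);

    // CustomScoreQuery now takes any Query as its scoring clause; by default the scores multiply.
    CustomScoreQuery custom = new CustomScoreQuery(subQuery, functionQuery);
    custom.setStrict(true); // keep the value-source part out of weight normalization

    // BoostedQuery from the same module; the (Query, ValueSource) constructor is assumed here.
    Query boosted = new BoostedQuery(subQuery, source);

    System.out.println(custom + " | " + boosted);
  }
}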

View File

@@ -35,8 +35,6 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.function.CustomScoreQuery;
import org.apache.lucene.search.function.CustomScoreProvider;
import org.apache.lucene.search.function.FieldScoreQuery;
import org.apache.lucene.search.function.FieldScoreQuery.Type;
import org.apache.lucene.spatial.DistanceUtils;
@@ -216,33 +214,6 @@ public class TestCartesian extends LuceneTestCase {
if (VERBOSE) System.out.println(dq);
//create a term query to search against all documents
Query tq = new TermQuery(new Term("metafile", "doc"));
FieldScoreQuery fsQuery = new FieldScoreQuery("geo_distance", Type.FLOAT);
CustomScoreQuery customScore = new CustomScoreQuery(dq.getQuery(tq),fsQuery){
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) {
return new CustomScoreProvider(context) {
@Override // TODO: broken, as reader is not used!
public float customScore(int doc, float subQueryScore, float valSrcScore){
if (VERBOSE) System.out.println(doc);
if (dq.distanceFilter.getDistance(doc) == null)
return 0;
double distance = dq.distanceFilter.getDistance(doc);
// boost score shouldn't exceed 1
if (distance < 1.0d)
distance = 1.0d;
// boost by distance is inversely proportional to
// the distance from the center point to the location
float score = (float) ((miles - distance) / miles );
return score * subQueryScore;
}
};
}
};
// Create a distance sort
// As the radius filter has performed the distance calculations
// already, pass in the filter to reuse the results.
@@ -252,7 +223,7 @@ public class TestCartesian extends LuceneTestCase {
// Perform the search, using the term query, the serial chain filter, and the
// distance sort
TopDocs hits = searcher.search(customScore,null, 1000, sort);
TopDocs hits = searcher.search(dq.getQuery(tq),null, 1000, sort);
int results = hits.totalHits;
ScoreDoc[] scoreDocs = hits.scoreDocs;
@@ -312,33 +283,6 @@ public class TestCartesian extends LuceneTestCase {
if (VERBOSE) System.out.println(dq);
//create a term query to search against all documents
Query tq = new TermQuery(new Term("metafile", "doc"));
FieldScoreQuery fsQuery = new FieldScoreQuery("geo_distance", Type.FLOAT);
CustomScoreQuery customScore = new CustomScoreQuery(dq.getQuery(tq),fsQuery){
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) {
return new CustomScoreProvider(context) {
@Override // TODO: broken, as reader is not used!
public float customScore(int doc, float subQueryScore, float valSrcScore){
if (VERBOSE) System.out.println(doc);
if (dq.distanceFilter.getDistance(doc) == null)
return 0;
double distance = dq.distanceFilter.getDistance(doc);
// boost score shouldn't exceed 1
if (distance < 1.0d)
distance = 1.0d;
// boost by distance is inversely proportional to
// the distance from the center point to the location
float score = (float) ((miles - distance) / miles );
return score * subQueryScore;
}
};
}
};
// Create a distance sort
// As the radius filter has performed the distance calculations
// already, pass in the filter to reuse the results.
@@ -348,7 +292,7 @@ public class TestCartesian extends LuceneTestCase {
// Perform the search, using the term query, the serial chain filter, and the
// distance sort
TopDocs hits = searcher.search(customScore,null, 1000, sort);
TopDocs hits = searcher.search(dq.getQuery(tq),null, 1000, sort);
int results = hits.totalHits;
ScoreDoc[] scoreDocs = hits.scoreDocs;
@@ -410,31 +354,6 @@ public class TestCartesian extends LuceneTestCase {
if (VERBOSE) System.out.println(dq);
//create a term query to search against all documents
Query tq = new TermQuery(new Term("metafile", "doc"));
FieldScoreQuery fsQuery = new FieldScoreQuery("geo_distance", Type.FLOAT);
CustomScoreQuery customScore = new CustomScoreQuery(dq.getQuery(tq),fsQuery){
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) {
return new CustomScoreProvider(context) {
@Override // TODO: broken, as reader is not used!
public float customScore(int doc, float subQueryScore, float valSrcScore){
if (VERBOSE) System.out.println(doc);
if (dq.distanceFilter.getDistance(doc) == null)
return 0;
double distance = dq.distanceFilter.getDistance(doc);
// boost score shouldn't exceed 1
if (distance < 1.0d)
distance = 1.0d;
// boost by distance is inversely proportional to
// the distance from the center point to the location
float score = (float) ( (miles - distance) / miles );
return score * subQueryScore;
}
};
}
};
// Create a distance sort
// As the radius filter has performed the distance calculations
// already, pass in the filter to reuse the results.
@@ -444,7 +363,7 @@ public class TestCartesian extends LuceneTestCase {
// Perform the search, using the term query, the serial chain filter, and the
// distance sort
TopDocs hits = searcher.search(customScore,null, 1000, sort);
TopDocs hits = searcher.search(dq.getQuery(tq),null, 1000, sort);
int results = hits.totalHits;
ScoreDoc[] scoreDocs = hits.scoreDocs;
@@ -506,30 +425,6 @@ public class TestCartesian extends LuceneTestCase {
if (VERBOSE) System.out.println(dq);
//create a term query to search against all documents
Query tq = new TermQuery(new Term("metafile", "doc"));
FieldScoreQuery fsQuery = new FieldScoreQuery("geo_distance", Type.FLOAT);
CustomScoreQuery customScore = new CustomScoreQuery(tq,fsQuery){
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) {
return new CustomScoreProvider(context) {
@Override // TODO: broken, as reader is not used!
public float customScore(int doc, float subQueryScore, float valSrcScore){
if (VERBOSE) System.out.println(doc);
if (dq.distanceFilter.getDistance(doc) == null)
return 0;
double distance = dq.distanceFilter.getDistance(doc);
// boost score shouldn't exceed 1
if (distance < 1.0d)
distance = 1.0d;
// boost by distance is inversely proportional to
// the distance from the center point to the location
float score = (float) ( (miles - distance) / miles );
return score * subQueryScore;
}
};
}
};
// Create a distance sort
// As the radius filter has performed the distance calculations
// already, pass in the filter to reuse the results.
@@ -539,7 +434,7 @@ public class TestCartesian extends LuceneTestCase {
// Perform the search, using the term query, the serial chain filter, and the
// distance sort
TopDocs hits = searcher.search(customScore,dq.getFilter(), 1000); //,sort);
TopDocs hits = searcher.search(tq,dq.getFilter(), 1000); //,sort);
int results = hits.totalHits;
ScoreDoc[] scoreDocs = hits.scoreDocs;
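All four removed blocks above applied the same distance boost. For reference, a sketch of that boost against the relocated org.apache.lucene.queries classes; the DistanceBoostQuery name and the Map-based distance lookup are hypothetical stand-ins for the test's dq/distanceFilter plumbing.

import java.util.Map;

import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.CustomScoreProvider;
import org.apache.lucene.queries.CustomScoreQuery;
import org.apache.lucene.search.Query;

// Hypothetical sketch of the removed boost: score is scaled by how close a hit is to the center.
public class DistanceBoostQuery extends CustomScoreQuery {
  private final Map<Integer, Double> distances; // doc id -> distance in miles (assumed lookup)
  private final double miles;                   // search radius

  public DistanceBoostQuery(Query subQuery, Query scoringQuery,
                            Map<Integer, Double> distances, double miles) {
    super(subQuery, scoringQuery);
    this.distances = distances;
    this.miles = miles;
  }

  @Override
  protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) {
    return new CustomScoreProvider(context) {
      @Override
      public float customScore(int doc, float subQueryScore, float valSrcScore) {
        Double distance = distances.get(doc);
        if (distance == null) {
          return 0f;
        }
        // The boost is inversely proportional to the distance from the center point;
        // distances under one mile are clamped so the boost never exceeds 1.
        double d = Math.max(distance, 1.0d);
        return (float) ((miles - d) / miles) * subQueryScore;
      }
    };
  }
}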

View File

@@ -1,4 +1,4 @@
package org.apache.lucene.search.function;
package org.apache.lucene.queries;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -49,7 +49,7 @@ public class CustomScoreProvider {
/**
* Compute a custom score by the subQuery score and a number of
* {@link ValueSourceQuery} scores.
* {@link org.apache.lucene.queries.function.FunctionQuery} scores.
* <p>
* Subclasses can override this method to modify the custom score.
* <p>
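The hunk above only retargets the javadoc link, but customScore is the override point it describes. A minimal sketch of a provider that adds the function score instead of multiplying it (mirroring the CustomAddQuery used in the tests further down):

import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.CustomScoreProvider;

// Minimal sketch: replace the default multiplication with addition.
public class AddingScoreProvider extends CustomScoreProvider {

  public AddingScoreProvider(AtomicReaderContext context) {
    super(context);
  }

  @Override
  public float customScore(int doc, float subQueryScore, float valSrcScore) {
    // subQueryScore comes from the wrapped query, valSrcScore from the FunctionQuery.
    return subQueryScore + valSrcScore;
  }
}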

View File

@@ -1,4 +1,4 @@
package org.apache.lucene.search.function;
package org.apache.lucene.queries;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -36,9 +36,7 @@ import org.apache.lucene.util.ToStringUtils;
* Query that sets document score as a programmatic function of several (sub) scores:
* <ol>
* <li>the score of its subQuery (any query)</li>
* <li>(optional) the score of its ValueSourceQuery (or queries).
* For most simple/convenient use cases this query is likely to be a
* {@link org.apache.lucene.search.function.FieldScoreQuery FieldScoreQuery}</li>
* <li>(optional) the score of its ValueSourceQuery (or queries).</li>
* </ol>
* Subclasses can modify the computation by overriding {@link #getCustomScoreProvider}.
*
@@ -47,7 +45,7 @@ import org.apache.lucene.util.ToStringUtils;
public class CustomScoreQuery extends Query {
private Query subQuery;
private ValueSourceQuery[] valSrcQueries; // never null (empty array if there are no valSrcQueries).
private Query[] scoringQueries; // never null (empty array if there are no scoringQueries).
private boolean strict = false; // if true, valueSource part of query does not take part in weights normalization.
/**
@@ -55,34 +53,30 @@ public class CustomScoreQuery extends Query {
* @param subQuery the sub query whose score is being customized. Must not be null.
*/
public CustomScoreQuery(Query subQuery) {
this(subQuery, new ValueSourceQuery[0]);
this(subQuery, new Query[0]);
}
/**
* Create a CustomScoreQuery over input subQuery and a {@link ValueSourceQuery}.
* Create a CustomScoreQuery over input subQuery and a {@link org.apache.lucene.queries.function.FunctionQuery}.
* @param subQuery the sub query whose score is being customized. Must not be null.
* @param valSrcQuery a value source query whose scores are used in the custom score
* computation. For most simple/convenient use case this would be a
* {@link org.apache.lucene.search.function.FieldScoreQuery FieldScoreQuery}.
* This parameter is optional - it can be null.
* @param scoringQuery a value source query whose scores are used in the custom score
* computation. This parameter is optional - it can be null.
*/
public CustomScoreQuery(Query subQuery, ValueSourceQuery valSrcQuery) {
this(subQuery, valSrcQuery!=null ? // don't want an array that contains a single null..
new ValueSourceQuery[] {valSrcQuery} : new ValueSourceQuery[0]);
public CustomScoreQuery(Query subQuery, Query scoringQuery) {
this(subQuery, scoringQuery!=null ? // don't want an array that contains a single null..
new Query[] {scoringQuery} : new Query[0]);
}
/**
* Create a CustomScoreQuery over input subQuery and a {@link ValueSourceQuery}.
* Create a CustomScoreQuery over input subQuery and a {@link org.apache.lucene.queries.function.FunctionQuery}.
* @param subQuery the sub query whose score is being customized. Must not be null.
* @param valSrcQueries value source queries whose scores are used in the custom score
* computation. For most simple/convenient use case these would be
* {@link org.apache.lucene.search.function.FieldScoreQuery FieldScoreQueries}.
* This parameter is optional - it can be null or even an empty array.
* @param scoringQueries value source queries whose scores are used in the custom score
* computation. This parameter is optional - it can be null or even an empty array.
*/
public CustomScoreQuery(Query subQuery, ValueSourceQuery... valSrcQueries) {
public CustomScoreQuery(Query subQuery, Query... scoringQueries) {
this.subQuery = subQuery;
this.valSrcQueries = valSrcQueries!=null?
valSrcQueries : new ValueSourceQuery[0];
this.scoringQueries = scoringQueries !=null?
scoringQueries : new Query[0];
if (subQuery == null) throw new IllegalArgumentException("<subquery> must not be null!");
}
@@ -97,11 +91,11 @@ public class CustomScoreQuery extends Query {
clone.subQuery = sq;
}
for(int i = 0; i < valSrcQueries.length; i++) {
final ValueSourceQuery v = (ValueSourceQuery) valSrcQueries[i].rewrite(reader);
if (v != valSrcQueries[i]) {
for(int i = 0; i < scoringQueries.length; i++) {
final Query v = scoringQueries[i].rewrite(reader);
if (v != scoringQueries[i]) {
if (clone == null) clone = (CustomScoreQuery) clone();
clone.valSrcQueries[i] = v;
clone.scoringQueries[i] = v;
}
}
@@ -112,8 +106,8 @@ public class CustomScoreQuery extends Query {
@Override
public void extractTerms(Set<Term> terms) {
subQuery.extractTerms(terms);
for(int i = 0; i < valSrcQueries.length; i++) {
valSrcQueries[i].extractTerms(terms);
for(int i = 0; i < scoringQueries.length; i++) {
scoringQueries[i].extractTerms(terms);
}
}
@@ -122,9 +116,9 @@ public class CustomScoreQuery extends Query {
public Object clone() {
CustomScoreQuery clone = (CustomScoreQuery)super.clone();
clone.subQuery = (Query) subQuery.clone();
clone.valSrcQueries = new ValueSourceQuery[valSrcQueries.length];
for(int i = 0; i < valSrcQueries.length; i++) {
clone.valSrcQueries[i] = (ValueSourceQuery) valSrcQueries[i].clone();
clone.scoringQueries = new Query[scoringQueries.length];
for(int i = 0; i < scoringQueries.length; i++) {
clone.scoringQueries[i] = (Query) scoringQueries[i].clone();
}
return clone;
}
@@ -134,8 +128,8 @@ public class CustomScoreQuery extends Query {
public String toString(String field) {
StringBuilder sb = new StringBuilder(name()).append("(");
sb.append(subQuery.toString(field));
for(int i = 0; i < valSrcQueries.length; i++) {
sb.append(", ").append(valSrcQueries[i].toString(field));
for(int i = 0; i < scoringQueries.length; i++) {
sb.append(", ").append(scoringQueries[i].toString(field));
}
sb.append(")");
sb.append(strict?" STRICT" : "");
@@ -156,16 +150,16 @@ public class CustomScoreQuery extends Query {
if (this.getBoost() != other.getBoost() ||
!this.subQuery.equals(other.subQuery) ||
this.strict != other.strict ||
this.valSrcQueries.length != other.valSrcQueries.length) {
this.scoringQueries.length != other.scoringQueries.length) {
return false;
}
return Arrays.equals(valSrcQueries, other.valSrcQueries);
return Arrays.equals(scoringQueries, other.scoringQueries);
}
/** Returns a hash code value for this object. */
@Override
public int hashCode() {
return (getClass().hashCode() + subQuery.hashCode() + Arrays.hashCode(valSrcQueries))
return (getClass().hashCode() + subQuery.hashCode() + Arrays.hashCode(scoringQueries))
^ Float.floatToIntBits(getBoost()) ^ (strict ? 1234 : 4321);
}
@@ -188,9 +182,9 @@ public class CustomScoreQuery extends Query {
public CustomWeight(IndexSearcher searcher) throws IOException {
this.subQueryWeight = subQuery.createWeight(searcher);
this.valSrcWeights = new Weight[valSrcQueries.length];
for(int i = 0; i < valSrcQueries.length; i++) {
this.valSrcWeights[i] = valSrcQueries[i].createWeight(searcher);
this.valSrcWeights = new Weight[scoringQueries.length];
for(int i = 0; i < scoringQueries.length; i++) {
this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher);
}
this.qStrict = strict;
}
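The varargs constructor above now accepts arbitrary Query scoring clauses. A small sketch combining two function queries over different fields (field names taken from the test setup, otherwise assumed):

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CustomScoreQuery;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.FloatValuesCreator;
import org.apache.lucene.search.cache.IntValuesCreator;

public class MultiScoringQuerySketch {
  public static CustomScoreQuery build() {
    Query subQuery = new TermQuery(new Term("text", "aid"));
    FunctionQuery byInt = new FunctionQuery(new IntFieldSource(
        new IntValuesCreator("iii", null, CachedArrayCreator.CACHE_VALUES_AND_BITS)));
    FunctionQuery byFloat = new FunctionQuery(new FloatFieldSource(
        new FloatValuesCreator("fff", null, CachedArrayCreator.CACHE_VALUES_AND_BITS)));

    // Both function scores are handed to the provider's customScore hook.
    CustomScoreQuery q = new CustomScoreQuery(subQuery, byInt, byFloat);
    q.setStrict(true); // value-source scores stay out of weight normalization
    return q;
  }
}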

View File

@@ -1,4 +1,6 @@
/**
package org.apache.lucene.queries.function;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
@@ -15,10 +17,6 @@
* limitations under the License.
*/
package org.apache.solr.search.function;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.*;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;

View File

@@ -197,7 +197,7 @@ public class FunctionQuery extends Query {
/** Returns true if <code>o</code> is equal to this. */
@Override
public boolean equals(Object o) {
if (FunctionQuery.class != o.getClass()) return false;
if (!FunctionQuery.class.isInstance(o)) return false;
FunctionQuery other = (FunctionQuery)o;
return this.getBoost() == other.getBoost()
&& this.func.equals(other.func);
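The hunk above relaxes equals() from an exact-class check to an isInstance check, so FunctionQuery subclasses can compare equal to a plain FunctionQuery. A small illustration, assuming FunctionQuery is not final; the WrappedFunctionQuery subclass is hypothetical:

import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.IntValuesCreator;

public class FunctionQueryEqualsIllustration {

  // Hypothetical subclass used only to show the effect of the isInstance check.
  static class WrappedFunctionQuery extends FunctionQuery {
    WrappedFunctionQuery(ValueSource func) {
      super(func);
    }
  }

  public static void main(String[] args) {
    ValueSource vs = new IntFieldSource(
        new IntValuesCreator("iii", null, CachedArrayCreator.CACHE_VALUES_AND_BITS));
    FunctionQuery base = new FunctionQuery(vs);
    FunctionQuery sub = new WrappedFunctionQuery(vs);

    // Before: FunctionQuery.class != sub.getClass(), so base.equals(sub) was always false.
    // After:  FunctionQuery.class.isInstance(sub) is true, so boost and ValueSource decide.
    System.out.println(base.equals(sub));
  }
}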

View File

@@ -1,4 +1,4 @@
package org.apache.lucene.search.function;
package org.apache.lucene.queries;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
@@ -17,9 +17,15 @@ package org.apache.lucene.search.function;
* limitations under the License.
*/
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.FunctionTestSetup;
import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.*;
import org.apache.lucene.search.cache.*;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
@@ -43,48 +49,64 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
* Test that CustomScoreQuery of Type.BYTE returns the expected scores.
*/
@Test
public void testCustomScoreByte() throws Exception, ParseException {
public void testCustomScoreByte() throws Exception {
// INT field values are small enough to be parsed as byte
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.BYTE, 1.0);
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.BYTE, 2.0);
ByteValuesCreator valuesCreator = new ByteValuesCreator(INT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
FunctionQuery functionQuery = new FunctionQuery(new ByteFieldSource(valuesCreator));
doTestCustomScore(functionQuery, 1.0);
doTestCustomScore(functionQuery, 2.0);
}
/**
* Test that CustomScoreQuery of Type.SHORT returns the expected scores.
*/
@Test
public void testCustomScoreShort() throws Exception, ParseException {
public void testCustomScoreShort() throws Exception {
// INT field values are small enough to be parsed as short
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.SHORT, 1.0);
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.SHORT, 3.0);
ShortValuesCreator valuesCreator = new ShortValuesCreator(INT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
FunctionQuery functionQuery = new FunctionQuery(new ShortFieldSource(valuesCreator));
doTestCustomScore(functionQuery, 1.0);
doTestCustomScore(functionQuery, 3.0);
}
/**
* Test that CustomScoreQuery of Type.INT returns the expected scores.
*/
@Test
public void testCustomScoreInt() throws Exception, ParseException {
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.INT, 1.0);
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.INT, 4.0);
public void testCustomScoreInt() throws Exception {
IntValuesCreator valuesCreator = new IntValuesCreator(INT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
FunctionQuery functionQuery = new FunctionQuery(new IntFieldSource(valuesCreator));
doTestCustomScore(functionQuery, 1.0);
doTestCustomScore(functionQuery, 4.0);
}
/**
* Test that CustomScoreQuery of Type.FLOAT returns the expected scores.
*/
@Test
public void testCustomScoreFloat() throws Exception, ParseException {
public void testCustomScoreFloat() throws Exception {
// INT field can be parsed as float
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.FLOAT, 1.0);
doTestCustomScore(INT_FIELD, FieldScoreQuery.Type.FLOAT, 5.0);
FloatValuesCreator valuesCreator = new FloatValuesCreator(INT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
FunctionQuery functionQuery = new FunctionQuery(new FloatFieldSource(valuesCreator));
doTestCustomScore(functionQuery, 1.0);
doTestCustomScore(functionQuery, 5.0);
// same values, but in float format
doTestCustomScore(FLOAT_FIELD, FieldScoreQuery.Type.FLOAT, 1.0);
doTestCustomScore(FLOAT_FIELD, FieldScoreQuery.Type.FLOAT, 6.0);
valuesCreator = new FloatValuesCreator(FLOAT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
functionQuery = new FunctionQuery(new FloatFieldSource(valuesCreator));
doTestCustomScore(functionQuery, 1.0);
doTestCustomScore(functionQuery, 6.0);
}
// must have static class otherwise serialization tests fail
private static class CustomAddQuery extends CustomScoreQuery {
// constructor
CustomAddQuery(Query q, ValueSourceQuery qValSrc) {
CustomAddQuery(Query q, FunctionQuery qValSrc) {
super(q, qValSrc);
}
@@ -119,7 +141,7 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
// must have static class otherwise serialization tests fail
private static class CustomMulAddQuery extends CustomScoreQuery {
// constructor
CustomMulAddQuery(Query q, ValueSourceQuery qValSrc1, ValueSourceQuery qValSrc2) {
CustomMulAddQuery(Query q, FunctionQuery qValSrc1, FunctionQuery qValSrc2) {
super(q, qValSrc1, qValSrc2);
}
@@ -228,10 +250,9 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
}
// Test that FieldScoreQuery returns docs with expected score.
private void doTestCustomScore(String field, FieldScoreQuery.Type tp, double dboost) throws Exception, ParseException {
private void doTestCustomScore(FunctionQuery functionQuery, double dboost) throws Exception {
float boost = (float) dboost;
IndexSearcher s = new IndexSearcher(dir, true);
FieldScoreQuery qValSrc = new FieldScoreQuery(field, tp); // a query that would score by the field
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD, anlzr);
String qtxt = "first aid text"; // from the doc texts in FunctionQuerySetup.
@@ -245,19 +266,19 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
log(q2CustomNeutral);
// custom query, that should (by default) multiply the scores of q1 by that of the field
CustomScoreQuery q3CustomMul = new CustomScoreQuery(q1, qValSrc);
CustomScoreQuery q3CustomMul = new CustomScoreQuery(q1, functionQuery);
q3CustomMul.setStrict(true);
q3CustomMul.setBoost(boost);
log(q3CustomMul);
// custom query, that should add the scores of q1 to that of the field
CustomScoreQuery q4CustomAdd = new CustomAddQuery(q1, qValSrc);
CustomScoreQuery q4CustomAdd = new CustomAddQuery(q1, functionQuery);
q4CustomAdd.setStrict(true);
q4CustomAdd.setBoost(boost);
log(q4CustomAdd);
// custom query, that multiplies and adds the field score to that of q1
CustomScoreQuery q5CustomMulAdd = new CustomMulAddQuery(q1, qValSrc, qValSrc);
CustomScoreQuery q5CustomMulAdd = new CustomMulAddQuery(q1, functionQuery, functionQuery);
q5CustomMulAdd.setStrict(true);
q5CustomMulAdd.setBoost(boost);
log(q5CustomMulAdd);

View File

@@ -0,0 +1,165 @@
package org.apache.lucene.queries.function;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.junit.AfterClass;
import org.junit.Ignore;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Setup for function tests
*/
@Ignore
public abstract class FunctionTestSetup extends LuceneTestCase {
/**
* Actual score computation order is slightly different from the assumptions;
* this allows for a small amount of variation.
*/
protected static float TEST_SCORE_TOLERANCE_DELTA = 0.001f;
protected static final int N_DOCS = 17; // select a prime number > 2
protected static final String ID_FIELD = "id";
protected static final String TEXT_FIELD = "text";
protected static final String INT_FIELD = "iii";
protected static final String FLOAT_FIELD = "fff";
private static final String DOC_TEXT_LINES[] = {
"Well, this is just some plain text we use for creating the ",
"test documents. It used to be a text from an online collection ",
"devoted to first aid, but if there was there an (online) lawyers ",
"first aid collection with legal advices, \"it\" might have quite ",
"probably advised one not to include \"it\"'s text or the text of ",
"any other online collection in one's code, unless one has money ",
"that one don't need and one is happy to donate for lawyers ",
"charity. Anyhow at some point, rechecking the usage of this text, ",
"it became uncertain that this text is free to use, because ",
"the web site in the disclaimer of he eBook containing that text ",
"was not responding anymore, and at the same time, in projGut, ",
"searching for first aid no longer found that eBook as well. ",
"So here we are, with a perhaps much less interesting ",
"text for the test, but oh much much safer. ",
};
protected static Directory dir;
protected static Analyzer anlzr;
@AfterClass
public static void afterClassFunctionTestSetup() throws Exception {
dir.close();
dir = null;
anlzr = null;
}
protected static void createIndex(boolean doMultiSegment) throws Exception {
if (VERBOSE) {
System.out.println("TEST: setUp");
}
// prepare a small index with just a few documents.
dir = newDirectory();
anlzr = new MockAnalyzer(random);
IndexWriterConfig iwc = newIndexWriterConfig( TEST_VERSION_CURRENT, anlzr).setMergePolicy(newLogMergePolicy());
if (doMultiSegment) {
iwc.setMaxBufferedDocs(_TestUtil.nextInt(random, 2, 7));
}
RandomIndexWriter iw = new RandomIndexWriter(random, dir, iwc);
iw.w.setInfoStream(VERBOSE ? System.out : null);
// add docs not exactly in natural ID order, to verify we do check the order of docs by scores
int remaining = N_DOCS;
boolean done[] = new boolean[N_DOCS];
int i = 0;
while (remaining > 0) {
if (done[i]) {
throw new Exception("to set this test correctly N_DOCS=" + N_DOCS + " must be prime and greater than 2!");
}
addDoc(iw, i);
done[i] = true;
i = (i + 4) % N_DOCS;
remaining --;
}
if (!doMultiSegment) {
if (VERBOSE) {
System.out.println("TEST: setUp optimize");
}
iw.optimize();
}
iw.close();
if (VERBOSE) {
System.out.println("TEST: setUp done close");
}
}
private static void addDoc(RandomIndexWriter iw, int i) throws Exception {
Document d = new Document();
Fieldable f;
int scoreAndID = i + 1;
f = newField(ID_FIELD, id2String(scoreAndID), Field.Store.YES, Field.Index.NOT_ANALYZED); // for debug purposes
f.setOmitNorms(true);
d.add(f);
f = newField(TEXT_FIELD, "text of doc" + scoreAndID + textLine(i), Field.Store.NO, Field.Index.ANALYZED); // for regular search
f.setOmitNorms(true);
d.add(f);
f = newField(INT_FIELD, "" + scoreAndID, Field.Store.NO, Field.Index.NOT_ANALYZED); // for function scoring
f.setOmitNorms(true);
d.add(f);
f = newField(FLOAT_FIELD, scoreAndID + ".000", Field.Store.NO, Field.Index.NOT_ANALYZED); // for function scoring
f.setOmitNorms(true);
d.add(f);
iw.addDocument(d);
log("added: " + d);
}
// 17 --> ID00017
protected static String id2String(int scoreAndID) {
String s = "000000000" + scoreAndID;
int n = ("" + N_DOCS).length() + 3;
int k = s.length() - n;
return "ID" + s.substring(k);
}
// some text line for regular search
private static String textLine(int docNum) {
return DOC_TEXT_LINES[docNum % DOC_TEXT_LINES.length];
}
// extract expected doc score from its ID Field: "ID7" --> 7.0
protected static float expectedFieldScore(String docIDFieldVal) {
return Float.parseFloat(docIDFieldVal.substring(2));
}
// debug messages (change DBG to true for anything to print)
protected static void log(Object o) {
if (VERBOSE) {
System.out.println(o.toString());
}
}
}
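Not shown in this new file: subclasses such as TestCustomScoreQuery build the shared index once per class via @BeforeClass. A minimal sketch of that wiring, with a hypothetical test name:

import static org.junit.Assert.assertNotNull;

import org.apache.lucene.queries.function.FunctionTestSetup;
import org.junit.BeforeClass;
import org.junit.Test;

// Hypothetical subclass showing how a test wires up the shared index.
public class ExampleFunctionTest extends FunctionTestSetup {

  @BeforeClass
  public static void beforeClassExampleFunctionTest() throws Exception {
    createIndex(true); // multi-segment index; pass false for a single optimized segment
  }

  @Test
  public void testIndexIsAvailable() throws Exception {
    // dir and anlzr are the protected statics initialized by createIndex(...)
    assertNotNull(dir);
    assertNotNull(anlzr);
  }
}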

View File

@@ -16,6 +16,7 @@
*/
package org.apache.solr.search;
import org.apache.lucene.queries.function.BoostedQuery;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.QueryValueSource;
@@ -24,7 +25,6 @@ import org.apache.lucene.search.Query;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.function.BoostedQuery;
/**
* Create a boosted query from the input value. The main value is the query to be boosted.

View File

@@ -22,6 +22,7 @@
package org.apache.solr.search;
import org.apache.lucene.queries.function.BoostedQuery;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ProductFloatFunction;
@@ -38,7 +39,6 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.FieldType;
import org.apache.solr.search.QueryUtils;
import org.apache.solr.search.function.BoostedQuery;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.analysis.*;

View File

@@ -18,6 +18,7 @@ package org.apache.solr.search;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.BoostedQuery;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.BoolDocValues;
@@ -38,7 +39,6 @@ import org.apache.lucene.util.UnicodeUtil;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.schema.*;
import org.apache.solr.search.function.*;
import org.apache.solr.search.function.distance.*;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;