mirror of https://github.com/apache/lucene.git
LUCENE-8038: Remove deprecated PayloadScoreQuery methods
parent bba2b6d418
commit a744654bca
@@ -17,6 +17,9 @@ API Changes
  stored they are equal to TermsEnum.docFreq() and Terms.getSumDocFreq(),
  respectively, because all freq() values equal 1. (Adrien Grand, Robert Muir)

* LUCENE-8038: Deprecated PayloadScoreQuery constructors have been removed (Alan
  Woodward)

Changes in Runtime Behavior

* LUCENE-7837: Indices that were created before the previous major version
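Migration sketch (assuming a term query and payload function like the ones used in the test updates later in this diff): callers of the removed constructors now have to pass a PayloadDecoder explicitly, for example PayloadDecoder.FLOAT_DECODER.

    // Before (constructor removed by this commit):
    // SpanQuery query = new PayloadScoreQuery(
    //     new SpanTermQuery(new Term("field", "seventy")), new MaxPayloadFunction());

    // After: the PayloadDecoder is a required argument
    SpanQuery query = new PayloadScoreQuery(
        new SpanTermQuery(new Term("field", "seventy")),
        new MaxPayloadFunction(),
        PayloadDecoder.FLOAT_DECODER);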
@@ -24,16 +24,6 @@ import org.apache.lucene.util.BytesRef;
 */
public interface PayloadDecoder {

  /**
   * Compute a float value based on the doc, position and payload
   * @deprecated Use {@link #computePayloadFactor(BytesRef)} - doc and position can be taken
   * into account in {@link PayloadFunction#currentScore(int, String, int, int, int, float, float)}
   */
  @Deprecated
  default float computePayloadFactor(int docID, int startPosition, int endPosition, BytesRef payload) {
    return computePayloadFactor(payload);
  }

  /**
   * Compute a float value for the given payload
   */
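With the doc/position variant gone, PayloadDecoder is left with the single computePayloadFactor(BytesRef) method, so a custom decoder can be a small lambda; per-doc and per-position context now belongs in PayloadFunction#currentScore instead. A minimal sketch, assuming single-byte payloads such as the ones the deleted test further down indexes:

    // Hypothetical decoder: score each match by its first payload byte,
    // falling back to 1 when the position carries no payload.
    PayloadDecoder firstByteDecoder =
        payload -> payload == null ? 1f : payload.bytes[payload.offset];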
@@ -59,22 +59,10 @@ public class PayloadScoreQuery extends SpanQuery {
  public PayloadScoreQuery(SpanQuery wrappedQuery, PayloadFunction function, PayloadDecoder decoder, boolean includeSpanScore) {
    this.wrappedQuery = Objects.requireNonNull(wrappedQuery);
    this.function = Objects.requireNonNull(function);
    this.decoder = decoder;
    this.decoder = Objects.requireNonNull(decoder);
    this.includeSpanScore = includeSpanScore;
  }

  /**
   * Creates a new PayloadScoreQuery
   * @param wrappedQuery the query to wrap
   * @param function a PayloadFunction to use to modify the scores
   * @param includeSpanScore include both span score and payload score in the scoring algorithm
   * @deprecated Use {@link #PayloadScoreQuery(SpanQuery, PayloadFunction, PayloadDecoder, boolean)}
   */
  @Deprecated
  public PayloadScoreQuery(SpanQuery wrappedQuery, PayloadFunction function, boolean includeSpanScore) {
    this(wrappedQuery, function, null, includeSpanScore);
  }

  /**
   * Creates a new PayloadScoreQuery that includes the underlying span scores
   * @param wrappedQuery the query to wrap

@@ -84,17 +72,6 @@ public class PayloadScoreQuery extends SpanQuery {
    this(wrappedQuery, function, decoder, true);
  }

  /**
   * Creates a new PayloadScoreQuery that includes the underlying span scores
   * @param wrappedQuery the query to wrap
   * @param function a PayloadFunction to use to modify the scores
   * @deprecated Use {@link #PayloadScoreQuery(SpanQuery, PayloadFunction, PayloadDecoder)}
   */
  @Deprecated
  public PayloadScoreQuery(SpanQuery wrappedQuery, PayloadFunction function) {
    this(wrappedQuery, function, true);
  }

  @Override
  public String getField() {
    return wrappedQuery.getField();
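Note that the remaining constructor now applies Objects.requireNonNull to the decoder as well, so code that used to pass null (to fall back to Similarity-based payload scoring) fails at construction time. A hedged illustration:

    // Throws NullPointerException after this change; previously a null decoder
    // selected the SimilarityPayloadDecoder fallback removed further down in this diff.
    new PayloadScoreQuery(new SpanTermQuery(new Term("field", "seventy")),
        new AveragePayloadFunction(), null, true);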
@@ -173,8 +150,7 @@ public class PayloadScoreQuery extends SpanQuery {
      if (spans == null)
        return null;
      SimScorer docScorer = innerWeight.getSimScorer(context);
      PayloadSpans payloadSpans = new PayloadSpans(spans,
          decoder == null ? new SimilarityPayloadDecoder(docScorer) : decoder);
      PayloadSpans payloadSpans = new PayloadSpans(spans, decoder);
      return new PayloadSpanScorer(this, payloadSpans, docScorer);
    }
@@ -232,7 +208,7 @@ public class PayloadScoreQuery extends SpanQuery {
    @Override
    public void collectLeaf(PostingsEnum postings, int position, Term term) throws IOException {
      BytesRef payload = postings.getPayload();
      float payloadFactor = decoder.computePayloadFactor(docID(), in.startPosition(), in.endPosition(), payload);
      float payloadFactor = decoder.computePayloadFactor(payload);
      payloadScore = function.currentScore(docID(), getField(), in.startPosition(), in.endPosition(),
          payloadsSeen, payloadScore, payloadFactor);
      payloadsSeen++;
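collectLeaf above shows the scoring contract: the decoder turns each matching position's payload into a factor, and the PayloadFunction folds that factor into a running per-document score. A minimal sketch of a custom function under that contract, assuming the currentScore/docScore signatures implied by the call sites in this diff:

    // Hypothetical PayloadFunction that sums the payload factors of all matches in a document.
    class SumPayloadFunction extends PayloadFunction {
      @Override
      public float currentScore(int docId, String field, int start, int end,
                                int numPayloadsSeen, float currentScore, float currentPayloadScore) {
        return currentScore + currentPayloadScore;  // accumulate each decoded factor
      }
      @Override
      public float docScore(int docId, String field, int numPayloadsSeen, float payloadScore) {
        return payloadScore;                        // no averaging over numPayloadsSeen
      }
      @Override
      public int hashCode() { return getClass().hashCode(); }
      @Override
      public boolean equals(Object obj) { return obj != null && getClass() == obj.getClass(); }
    }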
@@ -277,26 +253,4 @@ public class PayloadScoreQuery extends SpanQuery {

  }

  @Deprecated
  private static class SimilarityPayloadDecoder implements PayloadDecoder {

    final Similarity.SimScorer docScorer;

    public SimilarityPayloadDecoder(Similarity.SimScorer docScorer) {
      this.docScorer = docScorer;
    }

    @Override
    public float computePayloadFactor(int docID, int startPosition, int endPosition, BytesRef payload) {
      if (payload == null)
        return 0;
      return docScorer.computePayloadFactor(docID, startPosition, endPosition, payload);
    }

    @Override
    public float computePayloadFactor(BytesRef payload) {
      throw new UnsupportedOperationException();
    }
  }

}
@@ -1,327 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.queries.payloads;

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.spans.SpanContainingQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestDeprecatedPayloadScoreQuery extends LuceneTestCase {

  private static void checkQuery(SpanQuery query, PayloadFunction function, int[] expectedDocs, float[] expectedScores) throws IOException {
    checkQuery(query, function, true, expectedDocs, expectedScores);
  }

  private static void checkQuery(SpanQuery query, PayloadFunction function, boolean includeSpanScore, int[] expectedDocs, float[] expectedScores) throws IOException {

    assertTrue("Expected docs and scores arrays must be the same length!", expectedDocs.length == expectedScores.length);

    PayloadScoreQuery psq = new PayloadScoreQuery(query, function, includeSpanScore);
    TopDocs hits = searcher.search(psq, expectedDocs.length);

    for (int i = 0; i < hits.scoreDocs.length; i++) {
      if (i > expectedDocs.length - 1)
        fail("Unexpected hit in document " + hits.scoreDocs[i].doc);
      if (hits.scoreDocs[i].doc != expectedDocs[i])
        fail("Unexpected hit in document " + hits.scoreDocs[i].doc);
      assertEquals("Bad score in document " + expectedDocs[i], expectedScores[i], hits.scoreDocs[i].score, 0.000001);
    }

    if (hits.scoreDocs.length > expectedDocs.length)
      fail("Unexpected hit in document " + hits.scoreDocs[expectedDocs.length]);

    QueryUtils.check(random(), psq, searcher);
  }

  @Test
  public void testTermQuery() throws IOException {

    SpanTermQuery q = new SpanTermQuery(new Term("field", "eighteen"));
    for (PayloadFunction fn
        : new PayloadFunction[]{ new AveragePayloadFunction(), new MaxPayloadFunction(), new MinPayloadFunction() }) {
      checkQuery(q, fn, new int[]{ 118, 218, 18 },
          new float[] { 4.0f, 4.0f, 2.0f });
    }

  }

  @Test
  public void testOrQuery() throws IOException {

    SpanOrQuery q = new SpanOrQuery(new SpanTermQuery(new Term("field", "eighteen")),
        new SpanTermQuery(new Term("field", "nineteen")));
    for (PayloadFunction fn
        : new PayloadFunction[]{ new AveragePayloadFunction(), new MaxPayloadFunction(), new MinPayloadFunction() }) {
      checkQuery(q, fn, new int[]{ 118, 119, 218, 219, 18, 19 },
          new float[] { 4.0f, 4.0f, 4.0f, 4.0f, 2.0f, 2.0f });
    }

  }

  @Test
  public void testNearQuery() throws IOException {

    // 2 4
    // twenty two
    // 2 4 4 4
    // one hundred twenty two

    SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
        new SpanTermQuery(new Term("field", "twenty")),
        new SpanTermQuery(new Term("field", "two"))
    }, 0, true);

    checkQuery(q, new MaxPayloadFunction(), new int[]{ 22, 122, 222 }, new float[]{ 4.0f, 4.0f, 4.0f });
    checkQuery(q, new MinPayloadFunction(), new int[]{ 122, 222, 22 }, new float[]{ 4.0f, 4.0f, 2.0f });
    checkQuery(q, new AveragePayloadFunction(), new int[] { 122, 222, 22 }, new float[] { 4.0f, 4.0f, 3.0f });

  }

  @Test
  public void testNestedNearQuery() throws Exception {

    // (one OR hundred) NEAR (twenty two) ~ 1
    // 2 4 4 4
    // one hundred twenty two
    // two hundred twenty two

    SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
        new SpanOrQuery(new SpanTermQuery(new Term("field", "one")), new SpanTermQuery(new Term("field", "hundred"))),
        new SpanNearQuery(new SpanQuery[]{
            new SpanTermQuery(new Term("field", "twenty")),
            new SpanTermQuery(new Term("field", "two"))
        }, 0, true)
    }, 1, true);

    // check includeSpanScore makes a difference here
    searcher.setSimilarity(new MultiplyingSimilarity());
    try {
      checkQuery(q, new MaxPayloadFunction(), new int[]{ 122, 222 }, new float[]{ 20.901256561279297f, 17.06580352783203f });
      checkQuery(q, new MinPayloadFunction(), new int[]{ 222, 122 }, new float[]{ 17.06580352783203f, 10.450628280639648f });
      checkQuery(q, new AveragePayloadFunction(), new int[] { 122, 222 }, new float[]{ 19.15948486328125f, 17.06580352783203f });
      checkQuery(q, new MaxPayloadFunction(), false, new int[]{122, 222}, new float[]{4.0f, 4.0f});
      checkQuery(q, new MinPayloadFunction(), false, new int[]{222, 122}, new float[]{4.0f, 2.0f});
      checkQuery(q, new AveragePayloadFunction(), false, new int[]{222, 122}, new float[]{4.0f, 3.666666f});
    }
    finally {
      searcher.setSimilarity(similarity);
    }

  }

  @Test
  public void testSpanContainingQuery() throws Exception {

    // twenty WITHIN ((one OR hundred) NEAR two)~2
    SpanContainingQuery q = new SpanContainingQuery(
        new SpanNearQuery(new SpanQuery[]{
            new SpanOrQuery(new SpanTermQuery(new Term("field", "one")), new SpanTermQuery(new Term("field", "hundred"))),
            new SpanTermQuery(new Term("field", "two"))
        }, 2, true),
        new SpanTermQuery(new Term("field", "twenty"))
    );

    checkQuery(q, new AveragePayloadFunction(), new int[] { 222, 122 }, new float[]{ 4.0f, 3.666666f });
    checkQuery(q, new MaxPayloadFunction(), new int[]{ 122, 222 }, new float[]{ 4.0f, 4.0f });
    checkQuery(q, new MinPayloadFunction(), new int[]{ 222, 122 }, new float[]{ 4.0f, 2.0f });

  }

  @Test
  public void testEquality() {
    SpanQuery sq1 = new SpanTermQuery(new Term("field", "one"));
    SpanQuery sq2 = new SpanTermQuery(new Term("field", "two"));
    PayloadFunction minFunc = new MinPayloadFunction();
    PayloadFunction maxFunc = new MaxPayloadFunction();
    PayloadScoreQuery query1 = new PayloadScoreQuery(sq1, minFunc, true);
    PayloadScoreQuery query2 = new PayloadScoreQuery(sq2, minFunc, true);
    PayloadScoreQuery query3 = new PayloadScoreQuery(sq2, maxFunc, true);
    PayloadScoreQuery query4 = new PayloadScoreQuery(sq2, maxFunc, false);
    PayloadScoreQuery query5 = new PayloadScoreQuery(sq1, minFunc);

    assertEquals(query1, query5);
    assertFalse(query1.equals(query2));
    assertFalse(query1.equals(query3));
    assertFalse(query1.equals(query4));
    assertFalse(query2.equals(query3));
    assertFalse(query2.equals(query4));
    assertFalse(query3.equals(query4));
  }

  public void testRewrite() throws IOException {
    SpanMultiTermQueryWrapper xyz = new SpanMultiTermQueryWrapper(new WildcardQuery(new Term("field", "xyz*")));
    PayloadScoreQuery psq = new PayloadScoreQuery(xyz, new AveragePayloadFunction(), false);

    // if query wasn't rewritten properly, the query would have failed with "Rewrite first!"
    searcher.search(psq, 1);
  }

  private static IndexSearcher searcher;
  private static IndexReader reader;
  private static Directory directory;
  private static JustScorePayloadSimilarity similarity = new JustScorePayloadSimilarity();
  private static byte[] payload2 = new byte[]{2};
  private static byte[] payload4 = new byte[]{4};

  private static class PayloadAnalyzer extends Analyzer {
    @Override
    public TokenStreamComponents createComponents(String fieldName) {
      Tokenizer result = new MockTokenizer(MockTokenizer.SIMPLE, true);
      return new TokenStreamComponents(result, new PayloadFilter(result));
    }
  }

  private static class PayloadFilter extends TokenFilter {

    private int numSeen = 0;
    private final PayloadAttribute payAtt;

    public PayloadFilter(TokenStream input) {
      super(input);
      payAtt = addAttribute(PayloadAttribute.class);
    }

    @Override
    public boolean incrementToken() throws IOException {
      boolean result = false;
      if (input.incrementToken()) {
        if (numSeen % 4 == 0) {
          payAtt.setPayload(new BytesRef(payload2));
        } else {
          payAtt.setPayload(new BytesRef(payload4));
        }
        numSeen++;
        result = true;
      }
      return result;
    }

    @Override
    public void reset() throws IOException {
      super.reset();
      this.numSeen = 0;
    }
  }

  @BeforeClass
  public static void beforeClass() throws Exception {
    directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
        newIndexWriterConfig(new PayloadAnalyzer())
            .setMergePolicy(NoMergePolicy.INSTANCE)
            .setSimilarity(similarity));
    //writer.infoStream = System.out;
    for (int i = 0; i < 300; i++) {
      Document doc = new Document();
      doc.add(newTextField("field", English.intToEnglish(i), Field.Store.YES));
      String txt = English.intToEnglish(i) +' '+English.intToEnglish(i+1);
      doc.add(newTextField("field2", txt, Field.Store.YES));
      writer.addDocument(doc);
    }
    reader = writer.getReader();
    writer.close();

    searcher = newSearcher(reader);
    searcher.setSimilarity(similarity);
  }

  @AfterClass
  public static void afterClass() throws Exception {
    searcher = null;
    reader.close();
    reader = null;
    directory.close();
    directory = null;
  }

  static class MultiplyingSimilarity extends ClassicSimilarity {

    @Override
    public float scorePayload(int docId, int start, int end, BytesRef payload) {
      //we know it is size 4 here, so ignore the offset/length
      return payload.bytes[payload.offset];
    }

  }

  static class JustScorePayloadSimilarity extends MultiplyingSimilarity {

    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //Make everything else 1 so we see the effect of the payload
    //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    @Override
    public float lengthNorm(int length) {
      return 1;
    }

    @Override
    public float sloppyFreq(int distance) {
      return 1.0f;
    }

    @Override
    public float tf(float freq) {
      return 1.0f;
    }

    // idf used for phrase queries
    @Override
    public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats) {
      return Explanation.match(1.0f, "Inexplicable");
    }

    @Override
    public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats) {
      return Explanation.match(1.0f, "Inexplicable");
    }

  }

}
@@ -50,7 +50,7 @@ public class TestPayloadExplanations extends BaseExplanationTestCase {

  /** macro for payloadscorequery */
  private SpanQuery pt(String s, PayloadFunction fn) {
    return new PayloadScoreQuery(new SpanTermQuery(new Term(FIELD,s)), fn, random().nextBoolean());
    return new PayloadScoreQuery(new SpanTermQuery(new Term(FIELD,s)), fn, PayloadDecoder.FLOAT_DECODER, random().nextBoolean());
  }

  /* simple PayloadTermQueries */

@@ -92,7 +92,7 @@ public class TestPayloadExplanations extends BaseExplanationTestCase {

  public void testAllFunctions(SpanQuery query, int[] expected) throws Exception {
    for (PayloadFunction fn : functions) {
      qtest(new PayloadScoreQuery(query, fn, random().nextBoolean()), expected);
      qtest(new PayloadScoreQuery(query, fn, PayloadDecoder.FLOAT_DECODER, random().nextBoolean()), expected);
    }
  }
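The test changes above substitute PayloadDecoder.FLOAT_DECODER for the old Similarity-driven payload scoring. Assuming that constant simply reads the payload bytes back as an encoded float (the counterpart of the analysis module's PayloadHelper.encodeFloat), the round trip looks roughly like:

    // Assumption: FLOAT_DECODER pairs with PayloadHelper's float encoding.
    BytesRef payload = new BytesRef(PayloadHelper.encodeFloat(2.0f));
    float factor = PayloadDecoder.FLOAT_DECODER.computePayloadFactor(payload);  // ~2.0f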
@@ -149,7 +149,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {

  public void test() throws IOException {
    SpanQuery query = new PayloadScoreQuery(new SpanTermQuery(new Term("field", "seventy")),
        new MaxPayloadFunction());
        new MaxPayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    TopDocs hits = searcher.search(query, 100);
    assertTrue("hits is null and it shouldn't be", hits != null);
    assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100);

@@ -175,7 +175,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {

  public void testQuery() {
    SpanQuery boostingFuncTermQuery = new PayloadScoreQuery(new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy")),
        new MaxPayloadFunction());
        new MaxPayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    QueryUtils.check(boostingFuncTermQuery);

    SpanTermQuery spanTermQuery = new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"));

@@ -183,14 +183,14 @@ public class TestPayloadTermQuery extends LuceneTestCase {
    assertTrue(boostingFuncTermQuery.equals(spanTermQuery) == spanTermQuery.equals(boostingFuncTermQuery));

    SpanQuery boostingFuncTermQuery2 = new PayloadScoreQuery(new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy")),
        new AveragePayloadFunction());
        new AveragePayloadFunction(), PayloadDecoder.FLOAT_DECODER);

    QueryUtils.checkUnequal(boostingFuncTermQuery, boostingFuncTermQuery2);
  }

  public void testMultipleMatchesPerDoc() throws Exception {
    SpanQuery query = new PayloadScoreQuery(new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy")),
        new MaxPayloadFunction());
        new MaxPayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    TopDocs hits = searcher.search(query, 100);
    assertTrue("hits is null and it shouldn't be", hits != null);
    assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100);

@@ -229,7 +229,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {

  public void testNoMatch() throws Exception {
    SpanQuery query = new PayloadScoreQuery(new SpanTermQuery(new Term(PayloadHelper.FIELD, "junk")),
        new MaxPayloadFunction());
        new MaxPayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    TopDocs hits = searcher.search(query, 100);
    assertTrue("hits is null and it shouldn't be", hits != null);
    assertTrue("hits Size: " + hits.totalHits + " is not: " + 0, hits.totalHits == 0);

@@ -238,9 +238,9 @@ public class TestPayloadTermQuery extends LuceneTestCase {

  public void testNoPayload() throws Exception {
    SpanQuery q1 = new PayloadScoreQuery(new SpanTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "zero")),
        new MaxPayloadFunction());
        new MaxPayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    SpanQuery q2 = new PayloadScoreQuery(new SpanTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "foo")),
        new MaxPayloadFunction());
        new MaxPayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    BooleanClause c1 = new BooleanClause(q1, BooleanClause.Occur.MUST);
    BooleanClause c2 = new BooleanClause(q2, BooleanClause.Occur.MUST_NOT);
    BooleanQuery.Builder query = new BooleanQuery.Builder();
@@ -17,10 +17,11 @@
 package org.apache.lucene.queryparser.xml.builders;

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.payloads.AveragePayloadFunction;
import org.apache.lucene.queries.payloads.PayloadDecoder;
import org.apache.lucene.queries.payloads.PayloadScoreQuery;
import org.apache.lucene.queryparser.xml.DOMUtils;
import org.apache.lucene.queryparser.xml.ParserException;
import org.apache.lucene.queries.payloads.AveragePayloadFunction;
import org.apache.lucene.queries.payloads.PayloadScoreQuery;
import org.apache.lucene.search.spans.SpanBoostQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;

@@ -36,8 +37,9 @@ public class BoostingTermBuilder extends SpanBuilderBase {
    String fieldName = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName");
    String value = DOMUtils.getNonBlankTextOrFail(e);

    // TODO make function and decoder pluggable somehow?
    SpanQuery btq = new PayloadScoreQuery(new SpanTermQuery(new Term(fieldName, value)),
        new AveragePayloadFunction());
        new AveragePayloadFunction(), PayloadDecoder.FLOAT_DECODER);
    btq = new SpanBoostQuery(btq, DOMUtils.getAttribute(e, "boost", 1.0f));
    return btq;
  }
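For context, this builder backs the XML query parser's payload-boosted term element (assumed here to be registered in CoreParser under the name BoostingTermQuery), so markup along the lines of <BoostingTermQuery fieldName="field">value</BoostingTermQuery> now produces roughly:

    // Sketch of what getSpanQuery above returns for fieldName="field", value="value"
    SpanQuery btq = new PayloadScoreQuery(
        new SpanTermQuery(new Term("field", "value")),
        new AveragePayloadFunction(),
        PayloadDecoder.FLOAT_DECODER);
    btq = new SpanBoostQuery(btq, 1.0f);  // "boost" attribute, defaulting to 1.0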