mirror of https://github.com/apache/lucene.git
LUCENE-1773: fix highlighter test case; add contrib/memory as dependency to contrib/benchmark
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@801160 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent fe43cd7243
commit c471e8e83c
CHANGES.txt

@@ -8,8 +8,15 @@ $Id:$
 LUCENE-1770: Add EnwikiQueryMaker (Mark Miller)
 
 8/04/2009
-LUCENE-1773: Add FastVectorHighlighter tasks. (Koji Sekiguchi via
-Mike McCandless)
+LUCENE-1773: Add FastVectorHighlighter tasks. This change is a
+non-backwards compatible change in how subclasses of ReadTask define
+a highlighter. The methods doHighlight, isMergeContiguousFragments,
+maxNumFragments and getHighlighter are no longer used and have been
+marked deprecated and made final and package-private so that there is
+a compile-time error. Instead, the new getBenchmarkHighlighter method
+should return an appropriate highlighter for the task. The configuration
+of the highlighter tasks (maxFrags, mergeContiguous, etc.) is now
+accepted as params to the task. (Koji Sekiguchi via Mike McCandless)
 
 8/03/2009
 LUCENE-1778: Add support for log.step setting per task type. Previously, if
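The BenchmarkHighlighter class referenced above does not itself appear in this page, but its contract can be read off the way the tasks below use it: a single per-hit callback that returns the number of fragments produced. A minimal sketch, with the signature inferred from the CountingHighlighterTestTask hunk at the end of this diff (the real class presumably lives in org.apache.lucene.benchmark.byTask.tasks, since the test task uses it without an import):

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexReader;

    // Inferred shape of the new extension point: one callback per highlighted hit,
    // returning how many fragments were produced for the given field's text.
    public abstract class BenchmarkHighlighter {
      public abstract int doHighlight(IndexReader reader, int doc, String field,
          Document document, Analyzer analyzer, String text) throws Exception;
    }

Per the CHANGES entry, settings such as maxFrags and mergeContiguous are no longer supplied by overriding methods; they are passed to the highlighting tasks as task params.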
build.xml

@@ -105,6 +105,7 @@
         <pathelement path="${common.dir}/build/classes/java"/>
         <pathelement path="${common.dir}/build/classes/demo"/>
         <pathelement path="${common.dir}/build/contrib/highlighter/classes/java"/>
+        <pathelement path="${common.dir}/build/contrib/memory/classes/java"/>
         <pathelement path="${common.dir}/build/contrib/fast-vector-highlighter/classes/java"/>
         <fileset dir="lib">
           <include name="**/*.jar"/>
@@ -149,13 +150,18 @@
       <fileset dir="${common.dir}/contrib/highlighter" includes="build.xml"/>
     </subant>
   </target>
+  <target name="compile-memory">
+    <subant target="compile">
+      <fileset dir="${common.dir}/contrib/memory" includes="build.xml"/>
+    </subant>
+  </target>
   <target name="compile-vector-highlighter">
     <subant target="compile">
       <fileset dir="${common.dir}/contrib/fast-vector-highlighter" includes="build.xml"/>
     </subant>
   </target>
 
-  <target name="init" depends="common.init,compile-demo,compile-highlighter,compile-vector-highlighter,check-files"/>
+  <target name="init" depends="common.init,compile-demo,compile-memory,compile-highlighter,compile-vector-highlighter,check-files"/>
 
   <!-- make sure online collections (reuters) are first downloaded -->
   <target name="test" depends="init,get-files">
ReadTask.java

@@ -38,10 +38,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.highlight.Highlighter;
-import org.apache.lucene.search.highlight.QueryTermScorer;
-import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
-import org.apache.lucene.search.highlight.TextFragment;
-import org.apache.lucene.search.highlight.TokenSources;
 import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
 import org.apache.lucene.store.Directory;
@@ -122,10 +118,8 @@ public abstract class ReadTask extends PerfTask {
       int numHighlight = Math.min(numToHighlight(), scoreDocs.length);
       Analyzer analyzer = getRunData().getAnalyzer();
-      Highlighter highlighter = null;
-      int maxFrags = 1;
+      BenchmarkHighlighter highlighter = null;
       if (numHighlight > 0) {
-        highlighter = getHighlighter(q);
-        maxFrags = maxNumFragments();
+        highlighter = getBenchmarkHighlighter(q);
       }
       for (int m = 0; m < traversalSize; m++) {
         int id = scoreDocs[m].doc;
@@ -242,37 +236,50 @@ public abstract class ReadTask extends PerfTask {
   /**
+   * @deprecated Use {@link #getBenchmarkHighlighter(Query)}
    */
-  protected Highlighter getHighlighter(Query q){
-    return new Highlighter(new SimpleHTMLFormatter(), new QueryTermScorer(q));
+  final Highlighter getHighlighter(Query q) {
+    // not called
+    return null;
   }
 
+  /**
+   * Return an appropriate highlighter to be used with
+   * highlighting tasks
+   */
+  protected BenchmarkHighlighter getBenchmarkHighlighter(Query q){
+    return null;
+  }
+
   /**
    *
    * @return the maxiumum number of highlighter fragments
+   * @deprecated Please define getBenchmarkHighlighter instead
    */
-  public int maxNumFragments(){
+  final int maxNumFragments(){
+    // not called -- we switched this method to final to
+    // force any external subclasses to cutover to
+    // getBenchmarkHighlighter instead
     return 10;
   }
 
   /**
    *
    * @return true if the highlighter should merge contiguous fragments
-   * @deprecated
+   * @deprecated Please define getBenchmarkHighlighter instead
    */
-  public boolean isMergeContiguousFragments(){
+  final boolean isMergeContiguousFragments(){
+    // not called -- we switched this method to final to
+    // force any external subclasses to cutover to
+    // getBenchmarkHighlighter instead
     return false;
   }
 
   /**
-   * @deprecated
+   * @deprecated Please define getBenchmarkHighlighter instead
    */
-  protected int doHighlight(TokenStream ts, String text, Highlighter highlighter, boolean mergeContiguous, int maxFragments) throws IOException, InvalidTokenOffsetsException {
-    TextFragment[] frag = highlighter.getBestTextFragments(ts, text, mergeContiguous, maxFragments);
-    return frag != null ? frag.length : 0;
+  final int doHighlight(TokenStream ts, String text, Highlighter highlighter, boolean mergeContiguous, int maxFragments) throws IOException, InvalidTokenOffsetsException {
+    // not called -- we switched this method to final to
+    // force any external subclasses to cutover to
+    // getBenchmarkHighlighter instead
+    return 0;
   }
 
   protected Sort getSort() {
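For code outside the Lucene tree that subclassed these tasks, the cutover forced by the now-final methods looks roughly like the following. This is a hypothetical subclass (the class name, constructor, and hard-coded fragment settings are illustrative only, and the package locations are taken from the imports visible in this diff); it mirrors the pattern used by CountingHighlighterTestTask at the end of this page:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.benchmark.byTask.PerfRunData;
    import org.apache.lucene.benchmark.byTask.tasks.BenchmarkHighlighter;
    import org.apache.lucene.benchmark.byTask.tasks.SearchTravRetHighlightTask;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.highlight.Highlighter;
    import org.apache.lucene.search.highlight.QueryScorer;
    import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
    import org.apache.lucene.search.highlight.TextFragment;
    import org.apache.lucene.search.highlight.TokenSources;

    // Hypothetical external task: instead of overriding the now-final
    // getHighlighter/doHighlight/maxNumFragments, it supplies a BenchmarkHighlighter.
    public class MyHighlightTask extends SearchTravRetHighlightTask {

      public MyHighlightTask(PerfRunData runData) {
        super(runData);
      }

      protected BenchmarkHighlighter getBenchmarkHighlighter(Query q) {
        // Same Highlighter the old getHighlighter override would have built.
        final Highlighter hl = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(q));
        return new BenchmarkHighlighter() {
          public int doHighlight(IndexReader reader, int doc, String field,
              Document document, Analyzer analyzer, String text) throws Exception {
            TokenStream ts = TokenSources.getAnyTokenStream(reader, doc, field, document, analyzer);
            // Fragment settings are hard-coded here for illustration; per the CHANGES
            // entry they normally arrive as task params (maxFrags, mergeContiguous).
            TextFragment[] frags = hl.getBestTextFragments(ts, text, true, 3);
            return frags != null ? frags.length : 0;
          }
        };
      }
    }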
SearchTravRetHighlightTask.java

@@ -24,13 +24,11 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.highlight.Highlighter;
-import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
 import org.apache.lucene.search.highlight.QueryScorer;
 import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
 import org.apache.lucene.search.highlight.TextFragment;
 import org.apache.lucene.search.highlight.TokenSources;
 
-import java.io.IOException;
 import java.util.Set;
 import java.util.Collection;
 import java.util.HashSet;
@@ -103,17 +101,6 @@ public class SearchTravRetHighlightTask extends SearchTravTask {
     };
   }
 
-  /**
-   * @deprecated
-   */
-  public boolean isMergeContiguousFragments() {
-    return mergeContiguous;
-  }
-
-  public int maxNumFragments() {
-    return maxFrags;
-  }
-
   protected Collection/*<String>*/ getFieldsToHighlight(Document document) {
     Collection result = super.getFieldsToHighlight(document);
     //if stored is false, then result will be empty, in which case just get all the param fields
SearchTravRetVectorHighlightTask.java

@@ -101,10 +101,6 @@ public class SearchTravRetVectorHighlightTask extends SearchTravTask {
     };
   }
 
-  public int maxNumFragments() {
-    return maxFrags;
-  }
-
   protected Collection/*<String>*/ getFieldsToHighlight(Document document) {
     Collection result = super.getFieldsToHighlight(document);
     //if stored is false, then result will be empty, in which case just get all the param fields
CountingHighlighterTestTask.java

@@ -19,9 +19,13 @@ package org.apache.lucene.benchmark.byTask.tasks;
 import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
 import org.apache.lucene.search.highlight.Highlighter;
 import org.apache.lucene.search.highlight.TextFragment;
 import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
 import org.apache.lucene.search.highlight.QueryScorer;
+import org.apache.lucene.search.highlight.TokenSources;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
@@ -47,9 +51,15 @@ public class CountingHighlighterTestTask extends SearchTravRetHighlightTask {
     return document;
   }
 
-  protected int doHighlight(TokenStream ts, String text, Highlighter highlighter, boolean mergeContiguous, int maxFragments) throws IOException, InvalidTokenOffsetsException {
-    TextFragment[] frag = highlighter.getBestTextFragments(ts, text, mergeContiguous, maxFragments);
-    numHighlightedResults += frag != null ? frag.length : 0;
-    return frag != null ? frag.length : 0;
-  }
+  public BenchmarkHighlighter getBenchmarkHighlighter(Query q) {
+    highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(q));
+    return new BenchmarkHighlighter() {
+      public int doHighlight(IndexReader reader, int doc, String field, Document document, Analyzer analyzer, String text) throws Exception {
+        TokenStream ts = TokenSources.getAnyTokenStream(reader, doc, field, document, analyzer);
+        TextFragment[] frag = highlighter.getBestTextFragments(ts, text, mergeContiguous, maxFrags);
+        numHighlightedResults += frag != null ? frag.length : 0;
+        return frag != null ? frag.length : 0;
+      }
+    };
+  }
 }