LUCENE-3040: three more missed cases of tokenStream->reusableTokenStream

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1102827 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2011-05-13 17:50:33 +00:00
parent 5669d283ff
commit 993488ea07
2 changed files with 5 additions and 3 deletions
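
For context, a minimal sketch (not part of the commit) of the two Analyzer entry points involved: tokenStream() builds a fresh TokenStream on every call, while reusableTokenStream() hands back a thread-private instance the Analyzer can reset and reuse. The helper class and the "contents" field name below are placeholders.

import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;

// Hypothetical helper, only to illustrate the API switch; "contents" is a placeholder field name.
class StreamSource {
  // Old pattern: a new TokenStream (and its attribute instances) is allocated per call.
  static TokenStream perCall(Analyzer analyzer, String text) {
    return analyzer.tokenStream("contents", new StringReader(text));
  }

  // New pattern: the Analyzer reuses a cached, thread-private TokenStream where it can.
  static TokenStream reused(Analyzer analyzer, String text) throws IOException {
    return analyzer.reusableTokenStream("contents", new StringReader(text));
  }
}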


@@ -78,7 +78,7 @@ public class Highlighter
public final String getBestFragment(Analyzer analyzer, String fieldName,String text)
throws IOException, InvalidTokenOffsetsException
{
- TokenStream tokenStream = analyzer.tokenStream(fieldName, new StringReader(text));
+ TokenStream tokenStream = analyzer.reusableTokenStream(fieldName, new StringReader(text));
return getBestFragment(tokenStream, text);
}
@@ -130,7 +130,7 @@ public class Highlighter
int maxNumFragments)
throws IOException, InvalidTokenOffsetsException
{
- TokenStream tokenStream = analyzer.tokenStream(fieldName, new StringReader(text));
+ TokenStream tokenStream = analyzer.reusableTokenStream(fieldName, new StringReader(text));
return getBestFragments(tokenStream, text, maxNumFragments);
}
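
A hedged usage sketch (not from this commit) of the convenience overload changed above; the query, analyzer, and "contents" field name are assumptions for illustration.

import java.io.IOException;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;

class HighlightExample {
  // Returns the best-scoring fragment of "text" for "query" (null if nothing matched).
  static String bestFragment(Query query, Analyzer analyzer, String text)
      throws IOException, InvalidTokenOffsetsException {
    Highlighter highlighter = new Highlighter(new QueryScorer(query));
    // This overload now obtains its stream via analyzer.reusableTokenStream(...).
    return highlighter.getBestFragment(analyzer, "contents", text);
  }
}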


@@ -305,11 +305,12 @@ class LuceneMethods {
int position = 0;
// Tokenize field and add to postingTable
- TokenStream stream = analyzer.tokenStream(fieldName, reader);
+ TokenStream stream = analyzer.reusableTokenStream(fieldName, reader);
CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
PositionIncrementAttribute posIncrAtt = stream.addAttribute(PositionIncrementAttribute.class);
try {
+ stream.reset();
while (stream.incrementToken()) {
position += (posIncrAtt.getPositionIncrement() - 1);
position++;
@@ -323,6 +324,7 @@ class LuceneMethods {
}
if (position > maxFieldLength) break;
}
+ stream.end();
} finally {
stream.close();
}
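
Putting the LuceneMethods hunks together, the consumption pattern this change settles on looks roughly like the sketch below (reusableTokenStream, then reset, the incrementToken loop, end, close). The analyzer, the "contents" field name, and the printing are assumptions for illustration, not part of the commit.

import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

class TokenDumper {
  // Prints every token the analyzer produces for "text", with its absolute position.
  static void dump(Analyzer analyzer, String text) throws IOException {
    TokenStream stream = analyzer.reusableTokenStream("contents", new StringReader(text));
    CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
    PositionIncrementAttribute posIncrAtt = stream.addAttribute(PositionIncrementAttribute.class);
    int position = 0;
    try {
      stream.reset();                        // required before the first incrementToken()
      while (stream.incrementToken()) {
        position += posIncrAtt.getPositionIncrement();
        System.out.println(position + ": " + termAtt.toString());
      }
      stream.end();                          // records end-of-stream state (e.g. final offset)
    } finally {
      stream.close();                        // releases the Reader; the stream stays reusable
    }
  }
}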