mirror of https://github.com/apache/lucene.git
LUCENE-1800: reusable token streams for query parser
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@803664 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
bd6bb85bb4
commit
f79712ce82
|
@@ -728,6 +728,8 @@ Optimizations
     default implementation that uses a lockless cache.
     (Earwin Burrfoot, yonik)

+13. LUCENE-1800: QueryParser should use reusable TokenStreams. (yonik)
+
 Documentation
|
@@ -537,7 +537,13 @@ public class QueryParser implements QueryParserConstants {
   // Use the analyzer to get all the tokens, and then build a TermQuery,
   // PhraseQuery, or nothing based on the term count
-  TokenStream source = analyzer.tokenStream(field, new StringReader(queryText));
+  TokenStream source;
+  try {
+    source = analyzer.reusableTokenStream(field, new StringReader(queryText));
+    source.reset();
+  } catch (IOException e) {
+    source = analyzer.tokenStream(field, new StringReader(queryText));
+  }
   CachingTokenFilter buffer = new CachingTokenFilter(source);
   TermAttribute termAtt = null;
   PositionIncrementAttribute posIncrAtt = null;
|
@@ -562,7 +562,13 @@ public class QueryParser {
   // Use the analyzer to get all the tokens, and then build a TermQuery,
   // PhraseQuery, or nothing based on the term count
-  TokenStream source = analyzer.tokenStream(field, new StringReader(queryText));
+  TokenStream source;
+  try {
+    source = analyzer.reusableTokenStream(field, new StringReader(queryText));
+    source.reset();
+  } catch (IOException e) {
+    source = analyzer.tokenStream(field, new StringReader(queryText));
+  }
   CachingTokenFilter buffer = new CachingTokenFilter(source);
   TermAttribute termAtt = null;
   PositionIncrementAttribute posIncrAtt = null;
Loading…
Reference in New Issue