The variable was initialized but never used. This was caused by a merge from 2.9; fixed to use the final boolean field instead.

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@830456 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Uwe Schindler 2009-10-28 08:01:55 +00:00
parent c0f3e62ed4
commit 578761c635

View File

@ -70,7 +70,7 @@ public class StandardAnalyzer extends Analyzer {
public StandardAnalyzer(Version matchVersion, Set<?> stopWords) { public StandardAnalyzer(Version matchVersion, Set<?> stopWords) {
stopSet = stopWords; stopSet = stopWords;
setOverridesTokenStreamMethod(StandardAnalyzer.class); setOverridesTokenStreamMethod(StandardAnalyzer.class);
enableStopPositionIncrements = matchVersion.onOrAfter(Version.LUCENE_29); enableStopPositionIncrements = StopFilter.getEnablePositionIncrementsVersionDefault(matchVersion);
replaceInvalidAcronym = matchVersion.onOrAfter(Version.LUCENE_24); replaceInvalidAcronym = matchVersion.onOrAfter(Version.LUCENE_24);
this.matchVersion = matchVersion; this.matchVersion = matchVersion;
} }
@ -95,13 +95,13 @@ public class StandardAnalyzer extends Analyzer {
/** Constructs a {@link StandardTokenizer} filtered by a {@link /** Constructs a {@link StandardTokenizer} filtered by a {@link
StandardFilter}, a {@link LowerCaseFilter} and a {@link StopFilter}. */ StandardFilter}, a {@link LowerCaseFilter} and a {@link StopFilter}. */
@Override
public TokenStream tokenStream(String fieldName, Reader reader) { public TokenStream tokenStream(String fieldName, Reader reader) {
StandardTokenizer tokenStream = new StandardTokenizer(matchVersion, reader); StandardTokenizer tokenStream = new StandardTokenizer(matchVersion, reader);
tokenStream.setMaxTokenLength(maxTokenLength); tokenStream.setMaxTokenLength(maxTokenLength);
TokenStream result = new StandardFilter(tokenStream); TokenStream result = new StandardFilter(tokenStream);
result = new LowerCaseFilter(result); result = new LowerCaseFilter(result);
result = new StopFilter(StopFilter.getEnablePositionIncrementsVersionDefault(matchVersion), result = new StopFilter(enableStopPositionIncrements, result, stopSet);
result, stopSet);
return result; return result;
} }
@ -132,6 +132,7 @@ public class StandardAnalyzer extends Analyzer {
return maxTokenLength; return maxTokenLength;
} }
@Override
public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException { public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException {
if (overridesTokenStreamMethod) { if (overridesTokenStreamMethod) {
// LUCENE-1678: force fallback to tokenStream() if we // LUCENE-1678: force fallback to tokenStream() if we
@ -146,7 +147,7 @@ public class StandardAnalyzer extends Analyzer {
streams.tokenStream = new StandardTokenizer(matchVersion, reader); streams.tokenStream = new StandardTokenizer(matchVersion, reader);
streams.filteredTokenStream = new StandardFilter(streams.tokenStream); streams.filteredTokenStream = new StandardFilter(streams.tokenStream);
streams.filteredTokenStream = new LowerCaseFilter(streams.filteredTokenStream); streams.filteredTokenStream = new LowerCaseFilter(streams.filteredTokenStream);
streams.filteredTokenStream = new StopFilter(StopFilter.getEnablePositionIncrementsVersionDefault(matchVersion), streams.filteredTokenStream = new StopFilter(enableStopPositionIncrements,
streams.filteredTokenStream, stopSet); streams.filteredTokenStream, stopSet);
} else { } else {
streams.tokenStream.reset(reader); streams.tokenStream.reset(reader);