LUCENE-2200: final classes had non-overriding protected members

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@897707 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2010-01-10 21:09:58 +00:00
parent 673e368bf7
commit 7d5844740e
8 changed files with 25 additions and 31 deletions
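
The rationale, for readers skimming the diff: a final class can never be subclassed, so a protected member is never visible to the one audience protected exists for; private states the actual visibility, and a protected constructor in such a class is unreachable dead weight. A minimal before/after sketch (hypothetical class names, not from this commit):

// Before: 'protected' promises subclass access, but the class is final,
// so no subclass can exist; the modifier grants nothing beyond
// package-private access and misleads readers.
public final class BeforeFilter {
  protected int gramSize;      // no wider than package access in practice

  protected BeforeFilter() {}  // uncallable by subclasses; dead weight
}

// After: 'private' documents the real contract, and the unused
// protected constructor is simply deleted, as this commit does.
public final class AfterFilter {
  private final int gramSize;

  public AfterFilter(int gramSize) {
    this.gramSize = gramSize;
  }
}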


@@ -58,6 +58,10 @@ API Changes
   which reader the int doc, passed to customScore, refers to. (Paul
   chez Jamespot via Mike McCandless)
 
+* LUCENE-2200: Several final classes had non-overriding protected
+  members. These were converted to private and unused protected
+  constructors removed. (Steven Rowe via Robert Muir)
+
 Bug fixes
 
 * LUCENE-2092: BooleanQuery was ignoring disableCoord in its hashCode


@@ -30,8 +30,8 @@ import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 
 public final class ArabicNormalizationFilter extends TokenFilter {
 
-  protected ArabicNormalizer normalizer = null;
-  private TermAttribute termAtt;
+  private final ArabicNormalizer normalizer;
+  private final TermAttribute termAtt;
 
   public ArabicNormalizationFilter(TokenStream input) {
     super(input);


@@ -64,22 +64,15 @@ public final class EdgeNGramTokenFilter extends TokenFilter {
     }
   }
 
-  private int minGram;
-  private int maxGram;
+  private final int minGram;
+  private final int maxGram;
   private Side side;
   private char[] curTermBuffer;
   private int curTermLength;
   private int curGramSize;
-  private TermAttribute termAtt;
-  private OffsetAttribute offsetAtt;
-
-  protected EdgeNGramTokenFilter(TokenStream input) {
-    super(input);
-    this.termAtt = addAttribute(TermAttribute.class);
-    this.offsetAtt = addAttribute(OffsetAttribute.class);
-  }
+  private final TermAttribute termAtt;
+  private final OffsetAttribute offsetAtt;
 
   /**
    * Creates EdgeNGramTokenFilter that can generate n-grams in the sizes of the given range
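
With the dead protected constructor gone, the filter can only be built through its public constructors, which require the gram range up front. A minimal usage sketch against the 3.0-era API (assuming the EdgeNGramTokenFilter(TokenStream, Side, int, int) constructor and the pre-3.1 WhitespaceTokenizer(Reader) signature; not part of this commit):

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

public class EdgeNGramUsage {
  public static void main(String[] args) throws Exception {
    // The removed protected constructor never set minGram/maxGram, so any
    // instance it produced was unusable; the public constructor requires them.
    TokenStream ts = new EdgeNGramTokenFilter(
        new WhitespaceTokenizer(new StringReader("lucene")),
        EdgeNGramTokenFilter.Side.FRONT, // take grams from the front edge
        1, 3);                           // emits "l", "lu", "luc"
    TermAttribute term = ts.addAttribute(TermAttribute.class);
    while (ts.incrementToken()) {
      System.out.println(term.term());
    }
  }
}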


@@ -38,17 +38,10 @@ import org.apache.lucene.analysis.tokenattributes.TermAttribute;
  */
 public final class DelimitedPayloadTokenFilter extends TokenFilter {
   public static final char DEFAULT_DELIMITER = '|';
-  protected char delimiter = DEFAULT_DELIMITER;
-  protected TermAttribute termAtt;
-  protected PayloadAttribute payAtt;
-  protected PayloadEncoder encoder;
-
-  /**
-   * Construct a token stream filtering the given input.
-   */
-  protected DelimitedPayloadTokenFilter(TokenStream input) {
-    this(input, DEFAULT_DELIMITER, new IdentityEncoder());
-  }
+  private final char delimiter;
+  private final TermAttribute termAtt;
+  private final PayloadAttribute payAtt;
+  private final PayloadEncoder encoder;
 
   public DelimitedPayloadTokenFilter(TokenStream input, char delimiter, PayloadEncoder encoder) {


@@ -33,9 +33,9 @@ import org.apache.lucene.util.Version;
  */
 public final class ShingleAnalyzerWrapper extends Analyzer {
 
-  protected Analyzer defaultAnalyzer;
-  protected int maxShingleSize = 2;
-  protected boolean outputUnigrams = true;
+  private final Analyzer defaultAnalyzer;
+  private int maxShingleSize = 2;
+  private boolean outputUnigrams = true;
 
   public ShingleAnalyzerWrapper(Analyzer defaultAnalyzer) {
     super();


@@ -34,7 +34,9 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
 
   public void testPayloads() throws Exception {
     String test = "The quick|JJ red|JJ fox|NN jumped|VB over the lazy|JJ brown|JJ dogs|NN";
-    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
+    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter
+      (new WhitespaceTokenizer(new StringReader(test)),
+       DelimitedPayloadTokenFilter.DEFAULT_DELIMITER, new IdentityEncoder());
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
     PayloadAttribute payAtt = filter.getAttribute(PayloadAttribute.class);
     assertTermEquals("The", filter, termAtt, payAtt, null);
@@ -53,7 +55,9 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
 
   public void testNext() throws Exception {
     String test = "The quick|JJ red|JJ fox|NN jumped|VB over the lazy|JJ brown|JJ dogs|NN";
-    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
+    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter
+      (new WhitespaceTokenizer(new StringReader(test)),
+       DelimitedPayloadTokenFilter.DEFAULT_DELIMITER, new IdentityEncoder());
     assertTermEquals("The", filter, null);
     assertTermEquals("quick", filter, "JJ".getBytes("UTF-8"));
     assertTermEquals("red", filter, "JJ".getBytes("UTF-8"));


@@ -28,7 +28,7 @@ import java.io.Reader;
  */
 public final class CharReader extends CharStream {
 
-  protected Reader input;
+  private final Reader input;
 
   public static CharStream get(Reader input) {
     return input instanceof CharStream ?


@@ -164,7 +164,7 @@ final class FieldsReader implements Cloneable {
   /**
    * @throws AlreadyClosedException if this FieldsReader is closed
    */
-  protected final void ensureOpen() throws AlreadyClosedException {
+  private void ensureOpen() throws AlreadyClosedException {
     if (closed) {
       throw new AlreadyClosedException("this FieldsReader is closed");
     }