LUCENE-2200: final classes had non-overriding protected members

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@897707 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2010-01-10 21:09:58 +00:00
parent 673e368bf7
commit 7d5844740e
8 changed files with 25 additions and 31 deletions
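For context: in Java, a protected member of a final class can never be inherited or overridden, because final forbids subclassing; the modifier advertises an extension point that cannot exist. A minimal before/after sketch of the pattern this commit cleans up (class and field names are hypothetical, not taken from the Lucene sources):

    // Before: 'protected' implies subclasses may use or override this
    // member, but a final class can have no subclasses at all.
    final class BeforeExample {
      protected int state = 0;  // misleading; effectively package-private
    }

    // After: 'private' (plus 'final' where the field is assigned exactly
    // once) states the real contract.
    final class AfterExample {
      private final int state;

      AfterExample(int state) {
        this.state = state;
      }
    }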

CHANGES.txt

@@ -58,6 +58,10 @@ API Changes
   which reader the int doc, passed to customScore, refers to.  (Paul
   chez Jamespot via Mike McCandless)
 
+* LUCENE-2200: Several final classes had non-overriding protected
+  members. These were converted to private and unused protected
+  constructors removed.  (Steven Rowe via Robert Muir)
+
 Bug fixes
 
 * LUCENE-2092: BooleanQuery was ignoring disableCoord in its hashCode

ArabicNormalizationFilter.java

@@ -30,8 +30,8 @@ import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 public final class ArabicNormalizationFilter extends TokenFilter {
 
-  protected ArabicNormalizer normalizer = null;
-  private TermAttribute termAtt;
+  private final ArabicNormalizer normalizer;
+  private final TermAttribute termAtt;
 
   public ArabicNormalizationFilter(TokenStream input) {
     super(input);

EdgeNGramTokenFilter.java

@@ -64,22 +64,15 @@ public final class EdgeNGramTokenFilter extends TokenFilter {
     }
   }
 
-  private int minGram;
-  private int maxGram;
+  private final int minGram;
+  private final int maxGram;
   private Side side;
   private char[] curTermBuffer;
   private int curTermLength;
   private int curGramSize;
-  private TermAttribute termAtt;
-  private OffsetAttribute offsetAtt;
-
-  protected EdgeNGramTokenFilter(TokenStream input) {
-    super(input);
-    this.termAtt = addAttribute(TermAttribute.class);
-    this.offsetAtt = addAttribute(OffsetAttribute.class);
-  }
+  private final TermAttribute termAtt;
+  private final OffsetAttribute offsetAtt;
 
   /**
    * Creates EdgeNGramTokenFilter that can generate n-grams in the sizes of the given range
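Dropping the unused protected constructor above is what lets termAtt and offsetAtt become final: Java requires a final field to be definitely assigned in every constructor, so any constructor that survives must assign them (as the removed one did via addAttribute). A small sketch of that rule, with hypothetical names:

    final class Holder {
      private final String value;

      Holder(String value) {
        this.value = value;  // every remaining constructor must assign it
      }

      // Holder() {}  // would not compile: 'value' might be unassigned
    }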

DelimitedPayloadTokenFilter.java

@@ -38,17 +38,10 @@ import org.apache.lucene.analysis.tokenattributes.TermAttribute;
  */
 public final class DelimitedPayloadTokenFilter extends TokenFilter {
   public static final char DEFAULT_DELIMITER = '|';
-  protected char delimiter = DEFAULT_DELIMITER;
-  protected TermAttribute termAtt;
-  protected PayloadAttribute payAtt;
-  protected PayloadEncoder encoder;
-
-  /**
-   * Construct a token stream filtering the given input.
-   */
-  protected DelimitedPayloadTokenFilter(TokenStream input) {
-    this(input, DEFAULT_DELIMITER, new IdentityEncoder());
-  }
+  private final char delimiter;
+  private final TermAttribute termAtt;
+  private final PayloadAttribute payAtt;
+  private final PayloadEncoder encoder;
 
   public DelimitedPayloadTokenFilter(TokenStream input, char delimiter, PayloadEncoder encoder) {
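With the protected single-argument constructor gone, callers must now pass the delimiter and encoder explicitly. The updated tests later in this commit show the pattern; roughly:

    // Adapted caller, mirroring the test changes in this commit:
    TokenStream stream = new DelimitedPayloadTokenFilter(
        new WhitespaceTokenizer(new StringReader("fox|NN")),
        DelimitedPayloadTokenFilter.DEFAULT_DELIMITER,
        new IdentityEncoder());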

ShingleAnalyzerWrapper.java

@@ -33,9 +33,9 @@ import org.apache.lucene.util.Version;
  */
 public final class ShingleAnalyzerWrapper extends Analyzer {
 
-  protected Analyzer defaultAnalyzer;
-  protected int maxShingleSize = 2;
-  protected boolean outputUnigrams = true;
+  private final Analyzer defaultAnalyzer;
+  private int maxShingleSize = 2;
+  private boolean outputUnigrams = true;
 
   public ShingleAnalyzerWrapper(Analyzer defaultAnalyzer) {
     super();
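Note that only defaultAnalyzer becomes final here; maxShingleSize and outputUnigrams merely go private and keep their default initializers, presumably because the wrapper keeps them settable after construction. A hedged sketch of that assumed usage (the setter names are an assumption, not shown in this diff):

    ShingleAnalyzerWrapper wrapper = new ShingleAnalyzerWrapper(analyzer);
    wrapper.setMaxShingleSize(3);      // assumed setter
    wrapper.setOutputUnigrams(false);  // assumed setter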

DelimitedPayloadTokenFilterTest.java

@@ -34,7 +34,9 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
   public void testPayloads() throws Exception {
     String test = "The quick|JJ red|JJ fox|NN jumped|VB over the lazy|JJ brown|JJ dogs|NN";
-    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
+    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter
+      (new WhitespaceTokenizer(new StringReader(test)),
+       DelimitedPayloadTokenFilter.DEFAULT_DELIMITER, new IdentityEncoder());
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
     PayloadAttribute payAtt = filter.getAttribute(PayloadAttribute.class);
     assertTermEquals("The", filter, termAtt, payAtt, null);
@@ -53,7 +55,9 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
   public void testNext() throws Exception {
     String test = "The quick|JJ red|JJ fox|NN jumped|VB over the lazy|JJ brown|JJ dogs|NN";
-    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
+    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter
+      (new WhitespaceTokenizer(new StringReader(test)),
+       DelimitedPayloadTokenFilter.DEFAULT_DELIMITER, new IdentityEncoder());
     assertTermEquals("The", filter, null);
     assertTermEquals("quick", filter, "JJ".getBytes("UTF-8"));
     assertTermEquals("red", filter, "JJ".getBytes("UTF-8"));

CharReader.java

@@ -28,7 +28,7 @@ import java.io.Reader;
  */
 public final class CharReader extends CharStream {
 
-  protected Reader input;
+  private final Reader input;
 
   public static CharStream get(Reader input) {
     return input instanceof CharStream ?

FieldsReader.java

@@ -164,7 +164,7 @@ final class FieldsReader implements Cloneable {
   /**
    * @throws AlreadyClosedException if this FieldsReader is closed
    */
-  protected final void ensureOpen() throws AlreadyClosedException {
+  private void ensureOpen() throws AlreadyClosedException {
     if (closed) {
       throw new AlreadyClosedException("this FieldsReader is closed");
     }
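Since FieldsReader itself is final, the old protected final pair was doubly redundant: no subclass can exist to inherit the method, and the new private visibility makes a final modifier on it unnecessary as well. A generic sketch of the ensureOpen() guard idiom, using a hypothetical class rather than the Lucene source:

    final class Resource {
      private volatile boolean closed = false;

      private void ensureOpen() {
        if (closed) {
          throw new IllegalStateException("this Resource is closed");
        }
      }

      void read() {
        ensureOpen();  // every operation checks the guard first
        // ... access underlying state ...
      }

      void close() {
        closed = true;
      }
    }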