mirror of https://github.com/apache/lucene.git

tiny whitespace and javadoc fixes

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@347992 13f79535-47bb-0310-9956-ffa450edef68

parent bfde3257dc
commit dea69e3360
@@ -17,8 +17,8 @@ package org.apache.lucene.analysis;
  */
 
 /**
- * A filter that replaces accented characters in the ISO Latin 1 character set by
- * their unaccented equivalent. The case will not be altered.
+ * A filter that replaces accented characters in the ISO Latin 1 character set
+ * (ISO-8859-1) by their unaccented equivalent. The case will not be altered.
  * <p>
  * For instance, 'à' will be replaced by 'a'.
  * <p>
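For illustration only, not part of the commit: a minimal sketch of the accent filter this javadoc describes, written against the 2005-era TokenStream API (next()/termText()) that the hunks below also use. The demo class name and input are made up; WhitespaceTokenizer is assumed from the same org.apache.lucene.analysis package.

import java.io.StringReader;

import org.apache.lucene.analysis.ISOLatin1AccentFilter;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;

// Hypothetical demo class: folds "Déjà" to "Deja" while leaving case
// untouched, as the javadoc above promises.
public class AccentFilterDemo {
  public static void main(String[] args) throws Exception {
    TokenStream stream = new ISOLatin1AccentFilter(
        new WhitespaceTokenizer(new StringReader("Déjà Vu")));
    for (Token t = stream.next(); t != null; t = stream.next()) {
      System.out.println(t.termText()); // prints "Deja", then "Vu"
    }
  }
}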
@@ -23,7 +23,8 @@ import java.io.Reader;
  * Emits the entire input as a single token.
  */
 public class KeywordTokenizer extends Tokenizer {
-  private static final int DEFAULT_BUFFER_SIZE=256;
+  
+  private static final int DEFAULT_BUFFER_SIZE = 256;
 
   private boolean done;
   private final char[] buffer;
@@ -34,8 +35,8 @@ public class KeywordTokenizer extends Tokenizer {
 
   public KeywordTokenizer(Reader input, int bufferSize) {
     super(input);
-    this.buffer=new char[bufferSize];
-    this.done=false;
+    this.buffer = new char[bufferSize];
+    this.done = false;
   }
 
   public Token next() throws IOException {
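Again as illustration only: a minimal sketch exercising the two-argument constructor reformatted above and the "entire input as a single token" contract from the class javadoc. The demo class name and input are made up.

import java.io.StringReader;

import org.apache.lucene.analysis.KeywordTokenizer;
import org.apache.lucene.analysis.Token;

// Hypothetical demo class: KeywordTokenizer never splits, so the loop
// body runs exactly once and prints the whole input string.
public class KeywordTokenizerDemo {
  public static void main(String[] args) throws Exception {
    KeywordTokenizer tokenizer =
        new KeywordTokenizer(new StringReader("one single token"), 256);
    for (Token t = tokenizer.next(); t != null; t = tokenizer.next()) {
      System.out.println(t.termText()); // prints "one single token"
    }
  }
}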
@@ -37,7 +37,7 @@ public final class LengthFilter extends TokenFilter {
   {
     super(in);
     this.min = min;
-    this.max =max;
+    this.max = max;
   }
 
   /**
@@ -49,7 +49,7 @@ public final class LengthFilter extends TokenFilter {
     for (Token token = input.next(); token != null; token = input.next())
     {
       int len = token.termText().length();
-      if ( len >= min && len <= max) {
+      if (len >= min && len <= max) {
         return token;
       }
       // note: else we ignore it but should we index each part of it?
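The loop above is LengthFilter's whole policy: return tokens whose length lies within [min, max] and silently drop the rest. A minimal usage sketch under the same assumptions as the earlier demos (hypothetical demo class, WhitespaceTokenizer as the token source):

import java.io.StringReader;

import org.apache.lucene.analysis.LengthFilter;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;

// Hypothetical demo class: with min=3 and max=5, "a" (too short) and
// "midsize" (too long) are dropped; only "word" and "list" survive.
public class LengthFilterDemo {
  public static void main(String[] args) throws Exception {
    TokenStream stream = new LengthFilter(
        new WhitespaceTokenizer(new StringReader("a midsize word list")), 3, 5);
    for (Token t = stream.next(); t != null; t = stream.next()) {
      System.out.println(t.termText()); // prints "word", then "list"
    }
  }
}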