SOLR-4275: Fix TrieTokenizer to no longer throw StringIndexOutOfBoundsException in admin UI / AnalysisRequestHandler when you enter no number to tokenize

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1429401 13f79535-47bb-0310-9956-ffa450edef68
Uwe Schindler 2013-01-05 22:19:14 +00:00
parent c734e6d021
commit 981694a910
2 changed files with 36 additions and 12 deletions
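Note on the root cause: the pre-fix reset() used the return value of Reader.read(char[]) directly as a character count, but Reader.read() returns -1 at end-of-stream, so an empty value from the analysis form ended up as new String(buf, 0, -1). A minimal standalone sketch of that failure mode (plain JDK only, hypothetical class name, not Solr code):

    import java.io.Reader;
    import java.io.StringReader;

    public class EmptyReaderRepro {
      public static void main(String[] args) throws Exception {
        Reader input = new StringReader("");  // empty "value to tokenize"
        char[] buf = new char[32];
        int len = input.read(buf);            // -1: end of stream, nothing read
        System.out.println("len = " + len);
        // Passing the negative count on is what blew up in the old code:
        String v = new String(buf, 0, len);   // throws StringIndexOutOfBoundsException
        System.out.println(v);                // never reached
      }
    }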


@@ -466,6 +466,10 @@ Bug Fixes
 * SOLR-3829: Admin UI Logging events broken if schema.xml defines a catch-all
   dynamicField with type ignored (steffkes)
+* SOLR-4275: Fix TrieTokenizer to no longer throw StringIndexOutOfBoundsException
+  in admin UI / AnalysisRequestHandler when you enter no number to tokenize.
+  (Uwe Schindler)
 Other Changes
 ----------------------


@@ -50,39 +50,54 @@ public class TrieTokenizerFactory extends TokenizerFactory {
   @Override
   public TrieTokenizer create(Reader input) {
-    return new TrieTokenizer(input, type, precisionStep, TrieTokenizer.getNumericTokenStream(precisionStep));
+    return new TrieTokenizer(input, type, TrieTokenizer.getNumericTokenStream(precisionStep));
   }
 }
 final class TrieTokenizer extends Tokenizer {
   protected static final DateField dateField = new DateField();
-  protected final int precisionStep;
   protected final TrieTypes type;
   protected final NumericTokenStream ts;
   protected final OffsetAttribute ofsAtt = addAttribute(OffsetAttribute.class);
   protected int startOfs, endOfs;
+  protected boolean hasValue;
+  protected final char[] buf = new char[32];
   static NumericTokenStream getNumericTokenStream(int precisionStep) {
     return new NumericTokenStream(precisionStep);
   }
-  public TrieTokenizer(Reader input, TrieTypes type, int precisionStep, NumericTokenStream ts) {
+  public TrieTokenizer(Reader input, TrieTypes type, NumericTokenStream ts) {
     // must share the attribute source with the NumericTokenStream we delegate to
     super(ts, input);
     this.type = type;
-    this.precisionStep = precisionStep;
     this.ts = ts;
   }
   @Override
   public void reset() {
     try {
-      char[] buf = new char[32];
-      int len = input.read(buf);
+      int upto = 0;
+      while (upto < buf.length) {
+        final int length = input.read(buf, upto, buf.length - upto);
+        if (length == -1) break;
+        upto += length;
+      }
+      // skip remaining data if buffer was too short:
+      if (upto == buf.length) {
+        input.skip(Long.MAX_VALUE);
+      }
       this.startOfs = correctOffset(0);
-      this.endOfs = correctOffset(len);
-      String v = new String(buf, 0, len);
+      this.endOfs = correctOffset(upto);
+      if (upto == 0) {
+        hasValue = false;
+        return;
+      }
+      final String v = new String(buf, 0, upto);
       try {
         switch (type) {
           case INTEGER:
@@ -107,21 +122,24 @@ final class TrieTokenizer extends Tokenizer {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
             "Invalid Number: " + v);
       }
+      hasValue = true;
+      ts.reset();
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create TrieIndexTokenizer", e);
     }
-    ts.reset();
   }
   @Override
   public void close() throws IOException {
     super.close();
-    ts.close();
+    if (hasValue) {
+      ts.close();
+    }
   }
   @Override
   public boolean incrementToken() {
-    if (ts.incrementToken()) {
+    if (hasValue && ts.incrementToken()) {
       ofsAtt.setOffset(startOfs, endOfs);
       return true;
     }
@@ -130,7 +148,9 @@ final class TrieTokenizer extends Tokenizer {
   @Override
   public void end() throws IOException {
-    ts.end();
+    if (hasValue) {
+      ts.end();
+    }
     ofsAtt.setOffset(endOfs, endOfs);
   }
 }
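
Note on the fix: the replacement loop in reset() is a plain read-until-full-or-EOF pattern over the fixed 32-char buffer, so the character count (upto) can never go negative; if it stays 0 the tokenizer records hasValue = false and incrementToken(), end() and close() skip the delegate NumericTokenStream instead of failing. A standalone sketch of that pattern (plain JDK only, illustrative names, not Solr code):

    import java.io.IOException;
    import java.io.Reader;
    import java.io.StringReader;

    public class ReadFullySketch {
      // Read until the buffer is full or EOF is reached; mirrors the loop added to reset().
      static int readFully(Reader input, char[] buf) throws IOException {
        int upto = 0;
        while (upto < buf.length) {
          final int length = input.read(buf, upto, buf.length - upto);
          if (length == -1) break;   // EOF: keep whatever was read so far
          upto += length;
        }
        // skip remaining data if the buffer was too short:
        if (upto == buf.length) {
          input.skip(Long.MAX_VALUE);
        }
        return upto;
      }

      public static void main(String[] args) throws IOException {
        char[] buf = new char[32];
        System.out.println(readFully(new StringReader(""), buf));     // 0 -> hasValue stays false, no token
        System.out.println(readFully(new StringReader("4275"), buf)); // 4 -> value parsed as before
      }
    }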