LUCENE-1092: fix KeywordAnalyzer.reusableTokenStream so it can successfully be reused

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@605149 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2007-12-18 09:20:04 +00:00
parent 10c1ec3a66
commit 905674805c
3 changed files with 30 additions and 2 deletions

View File

@ -17,6 +17,7 @@ package org.apache.lucene.analysis;
* limitations under the License.
*/
import java.io.IOException;
import java.io.Reader;
/**
@ -29,12 +30,13 @@ public class KeywordAnalyzer extends Analyzer {
return new KeywordTokenizer(reader);
}
public TokenStream reusableTokenStream(String fieldName,
final Reader reader) {
final Reader reader) throws IOException {
Tokenizer tokenizer = (Tokenizer) getPreviousTokenStream();
if (tokenizer == null) {
tokenizer = new KeywordTokenizer(reader);
setPreviousTokenStream(tokenizer);
}
} else
tokenizer.reset(reader);
return tokenizer;
}
}

View File

@ -55,4 +55,9 @@ public class KeywordTokenizer extends Tokenizer {
}
return null;
}
/**
 * Resets this tokenizer so it can be reused on a new {@link Reader}.
 *
 * @param input the new character source to tokenize
 * @throws IOException if the superclass reset fails
 */
public void reset(Reader input) throws IOException {
super.reset(input);
// Clear the done flag so the next token call emits the new reader's content
// instead of reporting exhaustion left over from the previous reader.
this.done = false;
}
}

View File

@ -18,7 +18,10 @@ package org.apache.lucene.analysis;
*/
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -61,4 +64,22 @@ public class TestKeywordAnalyzer extends LuceneTestCase {
"+partnum:Q36 +space", query.toString("description"));
assertEquals("doc found!", 1, hits.length());
}
/**
 * Verifies that a reused KeywordAnalyzer tokenizes every document, not just
 * the first: two documents are indexed and both their terms must be findable.
 * (The method name keeps the historical "Mutiple" typo so the JUnit test id
 * stays stable; JUnit 3 discovers it by the "test" prefix.)
 *
 * @throws Exception on any index failure — the test then fails
 */
public void testMutipleDocument() throws Exception {
  RAMDirectory dir = new RAMDirectory();
  IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true);
  Document doc = new Document();
  doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.TOKENIZED));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new Field("partnum", "Q37", Field.Store.YES, Field.Index.TOKENIZED));
  writer.addDocument(doc);
  writer.close();

  IndexReader reader = IndexReader.open(dir);
  try {
    // Before LUCENE-1092 the reused analyzer dropped the second document's
    // token, so only the first term would be found.
    TermDocs td = reader.termDocs(new Term("partnum", "Q36"));
    assertTrue(td.next());
    td = reader.termDocs(new Term("partnum", "Q37"));
    assertTrue(td.next());
  } finally {
    // Fix: the reader was opened but never closed (resource leak).
    reader.close();
  }
}
}