LUCENE-1462

InstantiatedIndexWriter did not reset pre-analyzed TokenStreams the same way IndexWriter does.
Parts of InstantiatedIndex were not Serializable.



git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@725837 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Karl-Johan Wettin 2008-12-11 22:08:45 +00:00
parent 2225462178
commit 219a20a945
4 changed files with 12 additions and 2 deletions

View File

@ -15,6 +15,10 @@ Bug fixes
1. LUCENE-1423: InstantiatedTermEnum#skipTo(Term) throws ArrayIndexOutOfBounds on empty index.
(Karl Wettin)
2. LUCENE-1462: InstantiatedIndexWriter did not reset pre-analyzed TokenStreams the
same way IndexWriter does. Parts of InstantiatedIndex were not Serializable.
(Karl Wettin)
New features
1. LUCENE-1470: Added TrieRangeQuery, a much faster implementation of

View File

@ -1,5 +1,7 @@
package org.apache.lucene.store.instantiated;
import java.io.Serializable;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -20,7 +22,7 @@ package org.apache.lucene.store.instantiated;
/**
* For non package access see {@link org.apache.lucene.index.IndexReader#getFieldNames(org.apache.lucene.index.IndexReader.FieldOption)}
*/
class FieldSetting {
class FieldSetting implements Serializable {
String fieldName;
boolean storeTermVector = false;

View File

@ -3,6 +3,7 @@ package org.apache.lucene.store.instantiated;
import java.util.HashMap;
import java.util.Map;
import java.util.Collection;
import java.io.Serializable;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -24,7 +25,7 @@ import java.util.Collection;
/**
* Essetially a Map<FieldName, {@link org.apache.lucene.store.instantiated.FieldSetting}>
*/
class FieldSettings {
class FieldSettings implements Serializable {
FieldSettings() {

View File

@ -522,6 +522,9 @@ public class InstantiatedIndexWriter {
tokenStream = analyzer.tokenStream(field.name(), new StringReader(field.stringValue()));
}
// reset the TokenStream to the first token
tokenStream.reset();
final Token reusableToken = new Token();
for (Token nextToken = tokenStream.next(reusableToken); nextToken != null; nextToken = tokenStream.next(reusableToken)) {
tokens.add((Token) nextToken.clone()); // the vector will be built on commit.