Fix TestSegmentReader, which broke due to previous changes to DocHelper. The getPositionIncrementGap tests no longer use DocHelper, to avoid conflicts with other tests that rely on its shared fixtures.

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@348137 13f79535-47bb-0310-9956-ffa450edef68
Author: Erik Hatcher, 2005-11-22 09:35:58 +00:00
parent 4424265faf
commit e797a8e965
2 changed files with 30 additions and 24 deletions
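
For context on what the new test exercises: Analyzer.getPositionIncrementGap(String fieldName) (default 0) tells the indexer how many extra positions to insert between successive values of the same field, so position-sensitive queries cannot match across value boundaries. A minimal sketch of the effect, mirroring the analyzer the test below builds (a sketch only, not part of this commit's diff):

// Sketch: whitespace tokenization plus a 500-position gap between field values.
Analyzer analyzer = new Analyzer() {
  public TokenStream tokenStream(String fieldName, Reader reader) {
    return new WhitespaceTokenizer(reader);
  }
  // Extra positions inserted between successive values of the same field.
  public int getPositionIncrementGap(String fieldName) {
    return 500;
  }
};
// Indexing a document whose "repeated" field is added twice, "repeated one"
// then "repeated two", puts the tokens at positions 0,1 and 502,503, so the
// term "repeated" is indexed with freq == 2, at positions 0 and 502.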

DocHelper.java

@@ -71,14 +71,6 @@ class DocHelper {
   public static Field unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT,
       Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES);
-  public static final String REPEATED_1_TEXT = "repeated one";
-  public static final String REPEATED_KEY = "repeated";
-  public static Field repeatedField1 = new Field(REPEATED_KEY, REPEATED_1_TEXT,
-      Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
-  public static final String REPEATED_2_TEXT = "repeated two";
-  public static Field repeatedField2 = new Field(REPEATED_KEY, REPEATED_2_TEXT,
-      Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
   public static Map nameValues = null;
   // ordered list of all the fields...
@@ -92,8 +84,6 @@ class DocHelper {
     unIndField,
     unStoredField1,
     unStoredField2,
-    repeatedField1,
-    repeatedField2
   };
   // Map<String fieldName, Field field>

TestDocumentWriter.java

@@ -27,18 +27,17 @@ import org.apache.lucene.search.Similarity;
 import org.apache.lucene.store.RAMDirectory;
 import java.io.Reader;
 import java.io.IOException;
 public class TestDocumentWriter extends TestCase {
-  private RAMDirectory dir = new RAMDirectory();
-  private Document testDoc = new Document();
+  private RAMDirectory dir;
   public TestDocumentWriter(String s) {
     super(s);
   }
   protected void setUp() {
-    DocHelper.setupDoc(testDoc);
+    dir = new RAMDirectory();
   }
   protected void tearDown() {
@@ -51,15 +50,9 @@ public class TestDocumentWriter extends TestCase {
   }
   public void testAddDocument() throws Exception {
-    Analyzer analyzer = new Analyzer() {
-      public TokenStream tokenStream(String fieldName, Reader reader) {
-        return new WhitespaceTokenizer(reader);
-      }
-      public int getPositionIncrementGap(String fieldName) {
-        return 500;
-      }
-    };
+    Document testDoc = new Document();
+    DocHelper.setupDoc(testDoc);
+    Analyzer analyzer = new WhitespaceAnalyzer();
     Similarity similarity = Similarity.getDefault();
     DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
     String segName = "test";
@@ -101,7 +94,30 @@
       }
     }
-    TermPositions termPositions = reader.termPositions(new Term(DocHelper.REPEATED_KEY, "repeated"));
+  }
+  public void testPositionIncrementGap() throws IOException {
+    Analyzer analyzer = new Analyzer() {
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new WhitespaceTokenizer(reader);
+      }
+      public int getPositionIncrementGap(String fieldName) {
+        return 500;
+      }
+    };
+    Similarity similarity = Similarity.getDefault();
+    DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
+    Document doc = new Document();
+    doc.add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.TOKENIZED));
+    doc.add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.TOKENIZED));
+    String segName = "test";
+    writer.addDocument(segName, doc);
+    SegmentReader reader = SegmentReader.get(new SegmentInfo(segName, 1, dir));
+    TermPositions termPositions = reader.termPositions(new Term("repeated", "repeated"));
     assertTrue(termPositions.next());
     int freq = termPositions.freq();
     assertEquals(2, freq);
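
A sketch of how the position check could continue from here, assuming the 500-position gap set up above (illustrative only, not necessarily the exact remainder of the committed test):

// With WhitespaceTokenizer and a gap of 500, the first "repeated" sits at
// position 0 and the second at 1 + 500 + 1 = 502.
assertEquals(0, termPositions.nextPosition());
assertEquals(502, termPositions.nextPosition());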