LUCENE-2574: add missing seek

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@981661 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2010-08-02 19:20:50 +00:00
parent 7f4a23a0b7
commit b76b8e58f3
2 changed files with 53 additions and 44 deletions

FSDirectory.java

@@ -475,6 +475,10 @@ public abstract class FSDirectory extends Directory {
         // do the optimized copy
         FileChannel in = fsInput.file.getChannel();
+
+        // Necessary because BufferedIndexInput does lazy seeking:
+        in.position(fsInput.getFilePointer());
+
         FileChannel out = file.getChannel();
         long pos = out.position();
         long writeTo = numBytes + pos;
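Why the seek is needed: BufferedIndexInput.seek() is lazy -- it only records the new file pointer and adjusts its buffer, it does not reposition the underlying file -- so when the optimized copy above bypasses the buffer and hands the raw FileChannel to a channel-to-channel transfer, the channel may still be sitting wherever the last buffer refill left it. The sketch below is a minimal, self-contained illustration of the same pattern under that assumption; LazyInput, CopyBytesSketch and its copyBytes are hypothetical stand-ins, not Lucene classes, and the loop mirrors the pos/writeTo transfer shown in the hunk.

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

// Hypothetical stand-in for a buffered input whose seek() is lazy: it only
// records the logical position and never repositions the underlying channel.
class LazyInput implements AutoCloseable {
  private final RandomAccessFile raf;
  private long filePointer;                  // logical position set by seek()

  LazyInput(String path) throws IOException {
    raf = new RandomAccessFile(path, "r");
  }

  void seek(long pos) { filePointer = pos; } // lazy: channel is not moved
  long getFilePointer() { return filePointer; }
  FileChannel channel() { return raf.getChannel(); }
  public void close() throws IOException { raf.close(); }
}

public class CopyBytesSketch {
  // Copy numBytes from in's logical position into out at out's current position.
  static void copyBytes(LazyInput in, FileChannel out, long numBytes) throws IOException {
    FileChannel src = in.channel();
    // The "missing seek": without this, the transfer starts wherever the
    // channel happens to be, not where the caller last seek()'d to.
    src.position(in.getFilePointer());
    long pos = out.position();
    final long writeTo = pos + numBytes;
    while (pos < writeTo) {
      long n = out.transferFrom(src, pos, writeTo - pos);
      if (n <= 0) {
        throw new IOException("premature end of input while copying");
      }
      pos += n;
    }
    out.position(pos);                       // transferFrom does not advance out
    in.seek(in.getFilePointer() + numBytes); // keep the logical pointer in sync
  }
}

Syncing the channel position at the copy site, rather than making seek() eager, presumably keeps the common buffered-read path cheap and only pays for the sync when the raw channel is actually handed out.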

TestIndexWriter.java

@@ -5070,56 +5070,61 @@ public class TestIndexWriter extends LuceneTestCase {
   }
 
   public void testRandomStoredFields() throws IOException {
-    Directory dir = new MockRAMDirectory();
-    Random rand = newRandom();
-    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
-    final int docCount = 200*RANDOM_MULTIPLIER;
-    final int fieldCount = _TestUtil.nextInt(rand, 1, 5);
-    String[][] fields = new String[fieldCount][];
-    for(int i=0;i<fieldCount;i++) {
-      fields[i] = new String[docCount];
-    }
+    File index = _TestUtil.getTempDir("lucenerandfields");
+    Directory dir = FSDirectory.open(index);
+    try {
+      Random rand = newRandom();
+      RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
+      final int docCount = 200*RANDOM_MULTIPLIER;
+      final int fieldCount = _TestUtil.nextInt(rand, 1, 5);
+      String[][] fields = new String[fieldCount][];
+      for(int i=0;i<fieldCount;i++) {
+        fields[i] = new String[docCount];
+      }
 
-    final List<Integer> fieldIDs = new ArrayList<Integer>();
-    for(int i=0;i<fieldCount;i++) {
-      fieldIDs.add(i);
-    }
+      final List<Integer> fieldIDs = new ArrayList<Integer>();
+      for(int i=0;i<fieldCount;i++) {
+        fieldIDs.add(i);
+      }
 
-    for(int i=0;i<docCount;i++) {
-      Document doc = new Document();
-      for(int field: fieldIDs) {
-        final String s;
-        if (rand.nextInt(4) != 3) {
-          s = _TestUtil.randomUnicodeString(rand, 1000);
-          doc.add(new Field("f"+field, s, Field.Store.YES, Field.Index.NO));
-        } else {
-          s = null;
-        }
-        fields[field][i] = s;
-      }
-      w.addDocument(doc);
-      if (rand.nextInt(50) == 17) {
-        // mixup binding of field name -> Number every so often
-        Collections.shuffle(fieldIDs);
-      }
-    }
+      for(int i=0;i<docCount;i++) {
+        Document doc = new Document();
+        for(int field: fieldIDs) {
+          final String s;
+          if (rand.nextInt(4) != 3) {
+            s = _TestUtil.randomUnicodeString(rand, 1000);
+            doc.add(new Field("f"+field, s, Field.Store.YES, Field.Index.NO));
+          } else {
+            s = null;
+          }
+          fields[field][i] = s;
+        }
+        w.addDocument(doc);
+        if (rand.nextInt(50) == 17) {
+          // mixup binding of field name -> Number every so often
+          Collections.shuffle(fieldIDs);
+        }
+      }
 
-    for(int x=0;x<2;x++) {
-      IndexReader r = w.getReader();
-      for(int iter=0;iter<1000*RANDOM_MULTIPLIER;iter++) {
-        int docID = rand.nextInt(docCount);
-        Document doc = r.document(docID);
-        for(int i=0;i<fieldCount;i++) {
-          assertEquals(fields[i][docID], doc.get("f"+i));
-        }
-      }
-      r.close();
-      w.optimize();
-    }
-    w.close();
-    dir.close();
+      for(int x=0;x<2;x++) {
+        IndexReader r = w.getReader();
+        for(int iter=0;iter<1000*RANDOM_MULTIPLIER;iter++) {
+          int docID = rand.nextInt(docCount);
+          Document doc = r.document(docID);
+          for(int i=0;i<fieldCount;i++) {
+            assertEquals(fields[i][docID], doc.get("f"+i));
+          }
+        }
+        r.close();
+        w.optimize();
+      }
+      w.close();
+    } finally {
+      dir.close();
+      _TestUtil.rmDir(index);
+    }
   }
 }