mirror of https://github.com/apache/lucene.git
LUCENE-2811: fix bug in IndexWriter.addIndexes(Directory[]) caused by SI.hasVector
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1050655 13f79535-47bb-0310-9956-ffa450edef68
parent 66b2c016b0
commit b9777a3a46
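The hunks below touch IndexWriter.addIndexes(Directory...), SegmentInfo's debug string, and two tests. For orientation, here is a rough, self-contained sketch of the API path the fix concerns, written against the assumed 4.0-era trunk API; the Version constant, analyzer, and field flags are illustrative assumptions, not taken from the commit.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class AddIndexesSketch {
  public static void main(String[] args) throws Exception {
    Directory source = new RAMDirectory();
    Directory dest = new RAMDirectory();

    // Build a tiny source index whose segment stores term vectors,
    // the case SegmentInfo.hasVectors was added to track.
    IndexWriter src = new IndexWriter(source,
        new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)));
    Document doc = new Document();
    doc.add(new Field("body", "hello vectors", Field.Store.YES,
        Field.Index.ANALYZED, Field.TermVector.YES));
    src.addDocument(doc);
    src.close();

    // addIndexes(Directory...) copies the source segment files into dest;
    // the hunks below rework how shared doc store files are renamed and copied.
    IndexWriter dst = new IndexWriter(dest,
        new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)));
    dst.addIndexes(source);
    dst.close();
  }
}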
@@ -2151,7 +2151,8 @@ public class IndexWriter implements Closeable {
       }
       SegmentInfos sis = new SegmentInfos(codecs); // read infos from dir
       sis.read(dir, codecs);
-      Map<String, String> dsNames = new HashMap<String, String>();
+      final Set<String> dsFilesCopied = new HashSet<String>();
+      final Map<String, String> dsNames = new HashMap<String, String>();
       for (SegmentInfo info : sis) {
         assert !infos.contains(info): "dup info dir=" + info.dir + " name=" + info.name;
 
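The two collections added here appear to be created once per source Directory (they sit right after sis.read(dir, codecs)): dsNames maps a source doc-store segment name to the name it will carry in the destination index, and dsFilesCopied records which doc-store files have already been copied under their new name. A minimal stand-alone sketch of that bookkeeping, using illustrative names rather than Lucene's own API:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Hypothetical stand-in for the bookkeeping above: several segments can point
// at one shared doc store, whose files must be renamed once and copied once.
public class DocStoreCopyTracker {
  private final Set<String> dsFilesCopied = new HashSet<String>();
  private final Map<String, String> dsNames = new HashMap<String, String>();

  // Returns the destination doc-store name for a source name, assigning one on first sight.
  public String newDsName(String dsName, String newSegName) {
    if (dsName == null) {
      return newSegName;               // segment owns a private doc store
    }
    String mapped = dsNames.get(dsName);
    if (mapped == null) {
      dsNames.put(dsName, newSegName); // first segment seen for this shared doc store
      mapped = newSegName;
    }
    return mapped;
  }

  // Returns true only the first time a given destination file name is seen.
  public boolean shouldCopy(String newFileName) {
    return dsFilesCopied.add(newFileName);
  }
}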
@@ -2160,7 +2161,7 @@ public class IndexWriter implements Closeable {
         String dsName = info.getDocStoreSegment();
 
         if (infoStream != null) {
-          message("addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName);
+          message("addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName + " info=" + info);
         }
 
         // Determine if the doc store of this segment needs to be copied. It's
@@ -2170,22 +2171,32 @@ public class IndexWriter implements Closeable {
         // NOTE: pre-3x segments include a null DSName if they don't share doc
         // store. So the following code ensures we don't accidentally insert
         // 'null' to the map.
-        String newDsName = newSegName;
-        boolean docStoreCopied = false;
-        if (dsNames.containsKey(dsName)) {
-          newDsName = dsNames.get(dsName);
-          docStoreCopied = true;
-        } else if (dsName != null) {
-          dsNames.put(dsName, newSegName);
-          docStoreCopied = false;
+        final String newDsName;
+        if (dsName != null) {
+          if (dsNames.containsKey(dsName)) {
+            newDsName = dsNames.get(dsName);
+          } else {
+            dsNames.put(dsName, newSegName);
+            newDsName = newSegName;
+          }
+        } else {
+          newDsName = newSegName;
         }
 
         // Copy the segment files
         for (String file: info.files()) {
-          if (docStoreCopied && IndexFileNames.isDocStoreFile(file)) {
-            continue;
-          }
-          dir.copy(directory, file, newSegName + IndexFileNames.stripSegmentName(file));
+          final String newFileName;
+          if (IndexFileNames.isDocStoreFile(file)) {
+            newFileName = newDsName + IndexFileNames.stripSegmentName(file);
+            if (dsFilesCopied.contains(newFileName)) {
+              continue;
+            }
+            dsFilesCopied.add(newFileName);
+          } else {
+            newFileName = newSegName + IndexFileNames.stripSegmentName(file);
+          }
+          assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists";
+          dir.copy(directory, file, newFileName);
        }
 
        // Update SI appropriately
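The old loop skipped every doc-store file of a segment as soon as its shared doc store had been registered by an earlier segment, so a shared file that only a later segment lists (term vectors files are listed only when hasVectors is set) could end up never copied, which appears to be how the SI.hasVectors change bit addIndexes(Directory[]). The new loop decides per file: doc-store files are renamed against the shared newDsName and copied only the first time that target name is seen, everything else is renamed against newSegName, and an assert guards against overwriting an existing file. A rough stand-alone walk-through of that rule under assumed file names; isDocStoreFile and stripSegmentName here are simplified stand-ins, not the real IndexFileNames methods.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class SharedDocStoreCopyDemo {
  // Simplified stand-ins for the IndexFileNames helpers, for illustration only.
  static boolean isDocStoreFile(String file) {
    return file.endsWith(".fdt") || file.endsWith(".fdx")
        || file.endsWith(".tvx") || file.endsWith(".tvd") || file.endsWith(".tvf");
  }

  static String stripSegmentName(String file) {
    return file.substring(file.indexOf('.')); // keep only the extension
  }

  public static void main(String[] args) {
    // Two source segments sharing doc store "_0"; only the second one lists the
    // term vectors files, mimicking a mix of hasVectors=false/true segments.
    Map<String, List<String>> segmentFiles = new LinkedHashMap<String, List<String>>();
    segmentFiles.put("_1", Arrays.asList("_1.tis", "_0.fdt", "_0.fdx"));
    segmentFiles.put("_2", Arrays.asList("_2.tis", "_0.fdt", "_0.fdx", "_0.tvx", "_0.tvd", "_0.tvf"));

    Map<String, String> dsNames = new HashMap<String, String>();
    Set<String> dsFilesCopied = new HashSet<String>();
    int nextSeg = 5; // pretend the destination writer hands out _5, _6, ...

    for (Map.Entry<String, List<String>> e : segmentFiles.entrySet()) {
      String newSegName = "_" + (nextSeg++);
      String dsName = "_0"; // both segments share this doc store
      String newDsName = dsNames.containsKey(dsName) ? dsNames.get(dsName) : newSegName;
      dsNames.put(dsName, newDsName);

      for (String file : e.getValue()) {
        String newFileName;
        if (isDocStoreFile(file)) {
          newFileName = newDsName + stripSegmentName(file);
          if (!dsFilesCopied.add(newFileName)) {
            continue; // this shared file was already copied for an earlier segment
          }
        } else {
          newFileName = newSegName + stripSegmentName(file);
        }
        System.out.println("copy " + file + " -> " + newFileName);
      }
    }
  }
}

In this walk-through the three _0.tv* files are copied while segment _2 is processed; under the old rule they would have been skipped entirely because the shared doc store was already marked as handled by segment _1.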
@@ -581,6 +581,9 @@ public final class SegmentInfo {
     if (this.dir != dir) {
       s.append('x');
     }
+    if (hasVectors) {
+      s.append('v');
+    }
     s.append(docCount);
 
     int delCount = getDelCount() + pendingDelCount;
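SegmentInfo's debug string gains a 'v' flag when the segment holds term vectors, so the infoStream output wired up in the test changes below shows which segments the hasVectors change affects. A loose illustration of just the flag layout touched here, not the real SegmentInfo.toString, with the surrounding parts of the string omitted:

// Hypothetical demo of the flag characters: 'x' for a segment living in a
// foreign directory, the new 'v' when it stores term vectors, then the doc count.
public class SegmentFlagDemo {
  public static void main(String[] args) {
    StringBuilder s = new StringBuilder("_3"); // hypothetical segment name
    boolean inForeignDir = false;
    boolean hasVectors = true;
    int docCount = 42;

    if (inForeignDir) {
      s.append('x');
    }
    if (hasVectors) {
      s.append('v');
    }
    s.append(docCount);
    System.out.println(s); // prints "_3v42"
  }
}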
@@ -60,7 +60,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
             setMergePolicy(newLogMergePolicy(2))
     );
-
+    writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addIndexes(indexA, indexB);
     writer.optimize();
     writer.close();
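Both tests now hand the writer an infoStream when the suite runs verbosely, which is what surfaces the extended addIndexes message (the " info=" detail added above) while these scenarios replay. A minimal sketch of that wiring pattern, using a stand-in writer class rather than the real IndexWriter, and assuming VERBOSE is driven by a system property roughly as in LuceneTestCase:

import java.io.PrintStream;

public class VerboseWiringDemo {
  static final boolean VERBOSE = Boolean.getBoolean("tests.verbose"); // assumed property name

  // Stand-in for IndexWriter's infoStream plumbing; the real tests call
  // IndexWriter.setInfoStream(VERBOSE ? System.out : null).
  static class FakeWriter {
    private PrintStream infoStream;
    void setInfoStream(PrintStream ps) { infoStream = ps; }
    void message(String msg) {
      if (infoStream != null) {
        infoStream.println("IW: " + msg);
      }
    }
  }

  public static void main(String[] args) {
    FakeWriter writer = new FakeWriter();
    writer.setInfoStream(VERBOSE ? System.out : null);
    if (VERBOSE) {
      System.out.println("TEST: cycle...");
    }
    writer.message("addIndexes: process segment origName=_1 newName=_5 dsName=_0 info=...");
  }
}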
@@ -209,7 +209,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
         System.out.println("TEST: iter=" + iter);
       }
 
       // Start with 100 bytes more than we are currently using:
-      long diskFree = diskUsage+100;
+      long diskFree = diskUsage+_TestUtil.nextInt(random, 50, 200);
 
       int method = iter;
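Instead of always granting exactly 100 spare bytes, the disk-full test now draws its headroom from a range, so different random seeds hit the simulated disk-full condition at different points during addIndexes. A small stand-alone sketch of that idea; the local nextInt helper assumes the same inclusive-range contract as _TestUtil.nextInt(random, start, end):

import java.util.Random;

public class RandomHeadroomDemo {
  // Assumed contract: returns a value in [start, end], both ends inclusive.
  static int nextInt(Random r, int start, int end) {
    return start + r.nextInt(end - start + 1);
  }

  public static void main(String[] args) {
    Random random = new Random();  // the test framework seeds this per run
    long diskUsage = 10000;        // pretend this is the current index size in bytes
    long diskFree = diskUsage + nextInt(random, 50, 200);
    System.out.println("allow up to " + diskFree + " bytes before simulated disk full");
  }
}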
@@ -226,11 +226,15 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
       }
 
       while(!done) {
+        if (VERBOSE) {
+          System.out.println("TEST: cycle...");
+        }
 
         // Make a new dir that will enforce disk usage:
         MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
         writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
         IOException err = null;
+        writer.setInfoStream(VERBOSE ? System.out : null);
 
         MergeScheduler ms = writer.getConfig().getMergeScheduler();
         for(int x=0;x<2;x++) {