LUCENE-2811: switch version numbers on trunk due to 3.x index format change; fix hasVectors check to work correctly when doc stores are compound file

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1050440 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2010-12-17 16:16:39 +00:00
parent 2280dde754
commit 66b2c016b0
7 changed files with 28 additions and 9 deletions

DocumentsWriter.java

@@ -727,13 +727,11 @@ final class DocumentsWriter {
         newSegment.setDocStoreOffset(docStoreOffset);
       }
-      boolean hasVectors = false;
       if (closeDocStore) {
         closeDocStore(flushState, writer, deleter, newSegment, mergePolicy, segmentInfos);
       }
-      hasVectors |= flushState.hasVectors;
+      boolean hasVectors = flushState.hasVectors;
       if (numDocsInRAM > 0) {

IndexWriter.java

@@ -2719,8 +2719,6 @@ public class IndexWriter implements Closeable {
       // format as well:
       setMergeDocStoreIsCompoundFile(merge);
       merge.info.setHasProx(merger.fieldInfos().hasProx());
       segmentInfos.subList(start, start + merge.segments.size()).clear();
       assert !segmentInfos.contains(merge.info);
       segmentInfos.add(start, merge.info);

SegmentInfo.java

@@ -116,6 +116,7 @@ public final class SegmentInfo {
     docStoreOffset = src.docStoreOffset;
     docStoreIsCompoundFile = src.docStoreIsCompoundFile;
     hasVectors = src.hasVectors;
+    hasProx = src.hasProx;
     if (src.normGen == null) {
       normGen = null;
     } else {
@@ -194,12 +195,30 @@
       hasVectors = input.readByte() == 1;
     } else {
       final String storesSegment;
+      final String ext;
+      final boolean isCompoundFile;
       if (docStoreOffset != -1) {
         storesSegment = docStoreSegment;
+        isCompoundFile = docStoreIsCompoundFile;
+        ext = IndexFileNames.COMPOUND_FILE_STORE_EXTENSION;
       } else {
         storesSegment = name;
+        isCompoundFile = getUseCompoundFile();
+        ext = IndexFileNames.COMPOUND_FILE_EXTENSION;
       }
+      final Directory dirToTest;
+      if (isCompoundFile) {
+        dirToTest = new CompoundFileReader(dir, IndexFileNames.segmentFileName(storesSegment, "", ext));
+      } else {
+        dirToTest = dir;
+      }
+      try {
+        hasVectors = dirToTest.fileExists(IndexFileNames.segmentFileName(storesSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+      } finally {
+        if (isCompoundFile) {
+          dirToTest.close();
+        }
+      }
-      hasVectors = dir.fileExists(IndexFileNames.segmentFileName(storesSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
     }
   }
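In plain terms: when a pre-4.0 segment keeps its shared doc store in a compound file, the term vectors index is not a standalone file in the main directory, so probing dir directly always reported no vectors; the patch opens the compound doc store and probes inside it instead. A hedged sketch of the fixed probe, assuming a hypothetical segment "_0" whose doc store was written as "_0.cfx" (names invented for illustration; the real code above builds them via IndexFileNames):

    // Sketch only, not the actual patch. "_0" is a made-up segment name; the caller
    // is assumed to live in org.apache.lucene.index so CompoundFileReader is visible.
    // The old check, dir.fileExists("_0.tvx"), returns false in this scenario because
    // the vectors files sit inside the compound "_0.cfx" doc store, not in dir itself.
    static boolean probeVectorsInsideCompoundDocStore(Directory dir) throws IOException {
      Directory cfs = new CompoundFileReader(dir, "_0.cfx");   // open the compound doc store
      try {
        return cfs.fileExists("_0.tvx");                       // vectors index found inside it
      } finally {
        cfs.close();                                           // close only the reader we opened
      }
    }

This also mirrors why the patched code closes dirToTest only when it created a CompoundFileReader for the probe: closing the caller's main Directory would be wrong.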

DefaultSegmentInfosWriter.java

@@ -37,14 +37,14 @@ public class DefaultSegmentInfosWriter extends SegmentInfosWriter {
   /** Each segment records whether its postings are written
    * in the new flex format */
-  public static final int FORMAT_4_0 = -10;
+  public static final int FORMAT_4_0 = -11;
   /** Each segment records whether it has term vectors */
-  public static final int FORMAT_HAS_VECTORS = -11;
+  public static final int FORMAT_HAS_VECTORS = -10;
   /** This must always point to the most recent file format.
    * whenever you add a new format, make it 1 smaller (negative version logic)! */
-  public static final int FORMAT_CURRENT = FORMAT_HAS_VECTORS;
+  public static final int FORMAT_CURRENT = FORMAT_4_0;
   /** This must always point to the first supported file format. */
   public static final int FORMAT_MINIMUM = FORMAT_DIAGNOSTICS;
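Because the version numbers are negative ("make it 1 smaller"), a more negative constant means a newer format, and the swap keeps FORMAT_CURRENT pointing at the newest layout: presumably the hasVectors change was adopted by the 3.x index format (the "3.x index format change" in the commit message), so on trunk FORMAT_HAS_VECTORS moves to -10 and the flex 4.0 format takes -11. A small illustrative range check of that logic (not the actual segments-file reader; FORMAT_MINIMUM is used symbolically, via FORMAT_DIAGNOSTICS above):

    // Illustration of the negative version logic only; not Lucene's reader code.
    // A format is readable when it lies between the oldest supported layout
    // (FORMAT_MINIMUM, least negative) and the newest (FORMAT_CURRENT, most negative).
    static boolean isSupportedFormat(int format) {
      return format <= DefaultSegmentInfosWriter.FORMAT_MINIMUM
          && format >= DefaultSegmentInfosWriter.FORMAT_CURRENT;
    }

With the new constants, a segments file marked -10 (FORMAT_HAS_VECTORS) is still in range, while anything more negative than -11 (FORMAT_4_0) would be rejected as coming from an unknown, newer format.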

TestBackwardsCompatibility.java

@@ -344,6 +344,10 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
         f = d.getField("fie\u2C77ld");
         assertEquals("field with non-ascii name", f.stringValue());
       }
+      TermFreqVector tfv = reader.getTermFreqVector(i, "utf8");
+      assertNotNull("docID=" + i + " index=" + dirName, tfv);
+      assertTrue(tfv instanceof TermPositionVector);
     } else
       // Only ID 7 is deleted
       assertEquals(7, i);
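The added assertions check that term vectors stored by the old-format test indexes are still found after the version switch, and that they carry positions (hence the TermPositionVector check). A short sketch of how such a vector is typically consumed through the 3.x-era API; "someTerm" is a placeholder, not a term actually stored in these test indexes:

    // Sketch only; reader, docID and "someTerm" are assumed inputs.
    TermFreqVector tfv = reader.getTermFreqVector(docID, "utf8");
    if (tfv instanceof TermPositionVector) {
      TermPositionVector tpv = (TermPositionVector) tfv;
      int slot = tpv.indexOf("someTerm");             // term's slot in the vector, or -1 if absent
      if (slot != -1) {
        int[] positions = tpv.getTermPositions(slot); // positions recorded at index time
      }
    }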