mirror of https://github.com/apache/lucene.git
LUCENE-2811: SegmentInfo now tracks whether a segment wrote term vectors
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1045322 13f79535-47bb-0310-9956-ffa450edef68
commit 1ca059706e
parent fbab97a0f1
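The heart of the change: SegmentInfo now carries a tri-state hasVectors byte, so segments written by older formats, which never recorded the flag, can still answer the question by probing the filesystem once. A minimal self-contained sketch of that encoding (the supplier parameter is an illustrative stand-in for Lucene's Directory.fileExists check, not the real API):

    // 0 = no term vectors, 1 = has term vectors,
    // 2 = unknown (pre-FORMAT_HAS_VECTORS segment: probe the filesystem).
    import java.util.function.BooleanSupplier;

    class HasVectorsFlagSketch {
      private byte hasVectors = 2; // old segments start out unknown

      void setHasVectors(boolean v) {
        hasVectors = (byte) (v ? 1 : 0);
      }

      // vectorsIndexExists stands in for dir.fileExists(<segment>.tvx)
      boolean getHasVectors(BooleanSupplier vectorsIndexExists) {
        switch (hasVectors) {
          case 1:  return true;
          case 0:  return false;
          default: return vectorsIndexExists.getAsBoolean();
        }
      }
    }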
@@ -450,7 +450,7 @@ final class DocumentsWriter {
     assert docStoreSegment != null;

     if (infoStream != null) {
-      message("closeDocStore: files=" + openFiles + "; segment=" + docStoreSegment + "; docStoreOffset=" + docStoreOffset + "; numDocsInStore=" + numDocsInStore + "; isSeparate=" + isSeparate);
+      message("closeDocStore: openFiles=" + openFiles + "; segment=" + docStoreSegment + "; docStoreOffset=" + docStoreOffset + "; numDocsInStore=" + numDocsInStore + "; isSeparate=" + isSeparate);
     }

     closedFiles.clear();
@@ -720,17 +720,21 @@ final class DocumentsWriter {
                                                docStoreSegment, numDocsInRAM, numDocsInStore, writer.getConfig().getTermIndexInterval(),
                                                SegmentCodecs.build(fieldInfos, writer.codecs));

-      newSegment = new SegmentInfo(segment, numDocsInRAM, directory, false, -1, null, false, hasProx(), flushState.segmentCodecs);
+      newSegment = new SegmentInfo(segment, numDocsInRAM, directory, false, -1, null, false, hasProx(), flushState.segmentCodecs, false);

       if (!closeDocStore || docStoreOffset != 0) {
         newSegment.setDocStoreSegment(docStoreSegment);
         newSegment.setDocStoreOffset(docStoreOffset);
       }

+      boolean hasVectors = false;
+
       if (closeDocStore) {
         closeDocStore(flushState, writer, deleter, newSegment, mergePolicy, segmentInfos);
       }

+      hasVectors |= flushState.hasVectors;
+
       if (numDocsInRAM > 0) {

         assert nextDocID == numDocsInRAM;
@@ -749,6 +753,19 @@ final class DocumentsWriter {
       final long startNumBytesUsed = bytesUsed();
       consumer.flush(threads, flushState);

+      hasVectors |= flushState.hasVectors;
+
+      if (hasVectors) {
+        if (infoStream != null) {
+          message("new segment has vectors");
+        }
+        newSegment.setHasVectors(true);
+      } else {
+        if (infoStream != null) {
+          message("new segment has no vectors");
+        }
+      }
+
       if (infoStream != null) {
         message("flushedFiles=" + flushState.flushedFiles);
         message("flushed codecs=" + newSegment.getSegmentCodecs());

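In the DocumentsWriter hunks above, the flag is polled at two points because either phase can write vectors: closing a shared doc store, and flushing the indexing chain. A condensed, compilable view of that accumulation (the Runnable hooks are stand-ins for closeDocStore(...) and consumer.flush(...), not the real API):

    class FlushVectorsSketch {
      static class FlushState { boolean hasVectors; }

      static boolean segmentHasVectors(FlushState flushState,
                                       Runnable closeDocStore,
                                       Runnable consumerFlush) {
        boolean hasVectors = false;
        closeDocStore.run();                  // may set flushState.hasVectors
        hasVectors |= flushState.hasVectors;
        consumerFlush.run();                  // may also set flushState.hasVectors
        hasVectors |= flushState.hasVectors;
        return hasVectors;                    // recorded via newSegment.setHasVectors(true)
      }
    }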
@@ -1047,8 +1047,10 @@ public class IndexWriter implements Closeable {

       mergePolicy.close();

+      synchronized(this) {
         finishMerges(waitForMerges);
         stopMerges = true;
+      }

       mergeScheduler.close();

@@ -1251,14 +1253,16 @@ public class IndexWriter implements Closeable {
         synchronized (this) {
           // If docWriter has some aborted files that were
           // never incref'd, then we clean them up here
           deleter.checkpoint(segmentInfos, false);
           if (docWriter != null) {
             final Collection<String> files = docWriter.abortedFiles();
-            if (files != null)
+            if (files != null) {
               deleter.deleteNewFiles(files);
+            }
           }
         }
       }
     }
     if (doFlush)
       flush(true, false, false);
   } catch (OutOfMemoryError oom) {
@@ -1879,7 +1883,14 @@ public class IndexWriter implements Closeable {
     }

     try {
+      synchronized(this) {
         finishMerges(false);
+        stopMerges = true;
+      }
+
+      if (infoStream != null ) {
+        message("rollback: done finish merges");
+      }

       // Must pre-close these two, in case they increment
       // changeCount so that we can then set it to false
@@ -2230,7 +2241,8 @@ public class IndexWriter implements Closeable {
       int docCount = merger.merge();                // merge 'em

       SegmentInfo info = new SegmentInfo(mergedName, docCount, directory,
-                                         false, -1, null, false, merger.hasProx(), merger.getSegmentCodecs());
+                                         false, -1, null, false, merger.hasProx(), merger.getSegmentCodecs(),
+                                         merger.hasVectors());
       setDiagnostics(info, "addIndexes(IndexReader...)");

       boolean useCompoundFile;
@@ -2541,9 +2553,17 @@ public class IndexWriter implements Closeable {
       return false;
     } finally {
       flushControl.clearFlushPending();
-      if (!success && infoStream != null) {
-        message("hit exception during flush");
+      if (!success) {
+        if (infoStream != null) {
+          message("hit exception during flush");
+        }
+        if (docWriter != null) {
+          final Collection<String> files = docWriter.abortedFiles();
+          if (files != null) {
+            deleter.deleteNewFiles(files);
+          }
+        }
       }
     }
   }

@@ -2928,6 +2948,7 @@ public class IndexWriter implements Closeable {

     boolean mergeDocStores = false;
     boolean doFlushDocStore = false;
+    boolean hasVectors = false;
     final String currentDocStoreSegment = docWriter.getDocStoreSegment();

     // Test each segment to be merged: check if we need to
@@ -2939,6 +2960,10 @@ public class IndexWriter implements Closeable {
       if (si.hasDeletions())
         mergeDocStores = true;

+      if (si.getHasVectors()) {
+        hasVectors = true;
+      }
+
       // If it has its own (private) doc stores we must
       // merge the doc stores
       if (-1 == si.getDocStoreOffset())
@@ -3014,6 +3039,7 @@ public class IndexWriter implements Closeable {
       updatePendingMerges(1, false);
     }

+    merge.hasVectors = hasVectors;
     merge.mergeDocStores = mergeDocStores;

     // Bind a new segment name here so even with
@@ -3024,8 +3050,8 @@ public class IndexWriter implements Closeable {
                                  docStoreSegment,
                                  docStoreIsCompoundFile,
                                  false,
-                                 null);
+                                 null,
+                                 false);

     Map<String,String> details = new HashMap<String,String>();
     details.put("optimize", Boolean.toString(merge.optimize));
@@ -3033,6 +3059,10 @@ public class IndexWriter implements Closeable {
     details.put("mergeDocStores", Boolean.toString(mergeDocStores));
     setDiagnostics(merge.info, "merge", details);

+    if (infoStream != null) {
+      message("merge seg=" + merge.info.name + " mergeDocStores=" + mergeDocStores);
+    }
+
     // Also enroll the merged segment into mergingSegments;
     // this prevents it from getting selected for a merge
     // after our merge is done but while we are building the
@@ -3252,6 +3282,7 @@ public class IndexWriter implements Closeable {

       // Record which codec was used to write the segment
       merge.info.setSegmentCodecs(merger.getSegmentCodecs());
+      merge.info.setHasVectors(merger.hasVectors() || merge.hasVectors);

       if (infoStream != null) {
         message("merge segmentCodecs=" + merger.getSegmentCodecs());
@@ -3446,7 +3477,7 @@ public class IndexWriter implements Closeable {
         // are called, deleter should know about every
         // file referenced by the current head
         // segmentInfos:
-        assert deleter.exists(fileName) : "IndexFileDeleter doesn't know about file " + fileName;
+        assert deleter.exists(fileName): "IndexFileDeleter doesn't know about file " + fileName;
       }
     }
     return true;
   }

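Two IndexWriter hunks above combine into one invariant: a merged segment has vectors if any source segment reports them (gathered while registering the merge) or if the merger actually wrote vector files (ORed in after the merge runs). A compilable distillation (the SegInfo interface is a stand-in for SegmentInfo, not Lucene's type):

    import java.util.List;

    class MergeVectorsSketch {
      interface SegInfo { boolean getHasVectors(); }

      static boolean mergedHasVectors(List<SegInfo> sourceSegments,
                                      boolean mergerWroteVectors) {
        boolean hasVectors = mergerWroteVectors;  // merger.hasVectors()
        for (SegInfo si : sourceSegments) {
          hasVectors |= si.getHasVectors();       // merge.hasVectors from registration
        }
        return hasVectors;                        // merge.info.setHasVectors(...)
      }
    }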
@@ -68,6 +68,7 @@ public abstract class MergePolicy implements java.io.Closeable {

     SegmentInfo info;               // used by IndexWriter
     boolean mergeDocStores;         // used by IndexWriter
+    boolean hasVectors;             // used by IndexWriter
     boolean optimize;               // used by IndexWriter
     boolean registerDone;           // used by IndexWriter
     long mergeGen;                  // used by IndexWriter
@@ -156,6 +157,9 @@ public abstract class MergePolicy implements java.io.Closeable {
       if (mergeDocStores) {
         b.append(" [mergeDocStores]");
       }
+      if (aborted) {
+        b.append(" [ABORTED]");
+      }
       return b.toString();
     }

@@ -80,12 +80,15 @@ public final class SegmentInfo {

   private boolean hasProx;                        // True if this segment has any fields with omitTermFreqAndPositions==false

+  private byte hasVectors;                        // 0 if no; 1 if yes; 2 if must-check-filesystem (old index)
+
   private SegmentCodecs segmentCodecs;

   private Map<String,String> diagnostics;

   public SegmentInfo(String name, int docCount, Directory dir, boolean isCompoundFile, int docStoreOffset,
-                     String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx, SegmentCodecs segmentCodecs) {
+                     String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx, SegmentCodecs segmentCodecs,
+                     boolean hasVectors) {
     this.name = name;
     this.docCount = docCount;
     this.dir = dir;
@@ -96,6 +99,7 @@ public final class SegmentInfo {
     this.docStoreIsCompoundFile = docStoreIsCompoundFile;
     this.hasProx = hasProx;
     this.segmentCodecs = segmentCodecs;
+    this.hasVectors = (byte) (hasVectors ? 1 : 0);
     delCount = 0;
     assert docStoreOffset == -1 || docStoreSegment != null: "dso=" + docStoreOffset + " dss=" + docStoreSegment + " docCount=" + docCount;
   }
@@ -111,6 +115,7 @@ public final class SegmentInfo {
     delGen = src.delGen;
     docStoreOffset = src.docStoreOffset;
     docStoreIsCompoundFile = src.docStoreIsCompoundFile;
+    hasVectors = src.hasVectors;
     if (src.normGen == null) {
       normGen = null;
     } else {
@@ -184,6 +189,12 @@ public final class SegmentInfo {
       segmentCodecs.codecs = new Codec[] { codecs.lookup("PreFlex")};
     }
     diagnostics = input.readStringStringMap();
+
+    if (format <= DefaultSegmentInfosWriter.FORMAT_HAS_VECTORS) {
+      hasVectors = input.readByte();
+    } else {
+      hasVectors = 2;
+    }
   }

   /** Returns total size in bytes of all of files used by
@@ -204,6 +215,27 @@ public final class SegmentInfo {
     return sizeInBytes;
   }

+  public boolean getHasVectors() throws IOException {
+    if (hasVectors == 1) {
+      return true;
+    } else if (hasVectors == 0) {
+      return false;
+    } else {
+      final String storesSegment;
+      if (getDocStoreOffset() != -1) {
+        storesSegment = getDocStoreSegment();
+      } else {
+        storesSegment = name;
+      }
+      return dir.fileExists(IndexFileNames.segmentFileName(storesSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+    }
+  }
+
+  public void setHasVectors(boolean v) {
+    hasVectors = (byte) (v ? 1 : 0);
+    clearFiles();
+  }
+
   public boolean hasDeletions() {
     // Cases:
     //
@@ -229,18 +261,14 @@ public final class SegmentInfo {

   @Override
   public Object clone() {
-    SegmentInfo si = new SegmentInfo(name, docCount, dir, isCompoundFile, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, hasProx, segmentCodecs);
-    si.isCompoundFile = isCompoundFile;
+    SegmentInfo si = new SegmentInfo(name, docCount, dir, isCompoundFile, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, hasProx, segmentCodecs, false);
     si.delGen = delGen;
     si.delCount = delCount;
-    si.hasProx = hasProx;
     si.diagnostics = new HashMap<String, String>(diagnostics);
     if (normGen != null) {
       si.normGen = normGen.clone();
     }
-    si.docStoreOffset = docStoreOffset;
-    si.docStoreSegment = docStoreSegment;
-    si.docStoreIsCompoundFile = docStoreIsCompoundFile;
+    si.hasVectors = hasVectors;
     return si;
   }

@@ -404,6 +432,7 @@ public final class SegmentInfo {
     output.writeByte((byte) (hasProx ? 1:0));
     segmentCodecs.write(output);
     output.writeStringStringMap(diagnostics);
+    output.writeByte(hasVectors);
   }

   void setHasProx(boolean hasProx) {
@@ -466,12 +495,30 @@ public final class SegmentInfo {
       if (docStoreIsCompoundFile) {
         fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
       } else {
-        for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
-          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", ext));
+        fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.FIELDS_INDEX_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.FIELDS_EXTENSION));
+        if (hasVectors == 1) {
+          fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+          fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+          fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+        } else if (hasVectors == 2) {
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+        }
       }
     } else if (!useCompoundFile) {
-      for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
-        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", ext));
+      fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.FIELDS_INDEX_EXTENSION));
+      fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.FIELDS_EXTENSION));
+      if (hasVectors == 1) {
+        fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+      } else if (hasVectors == 2) {
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+      }
     }

     String delFileName = IndexFileNames.fileNameFromGeneration(name, IndexFileNames.DELETES_EXTENSION, delGen);

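The persistence side of the tri-state flag, from the SegmentInfo hunks above: write(...) always appends the byte after the diagnostics map, while the read path falls back to 2 (must-check) for any segments file older than FORMAT_HAS_VECTORS. A sketch using plain java.io streams in place of Lucene's IndexInput/IndexOutput:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    class HasVectorsIOSketch {
      // Formats count downward; -11 introduced the per-segment vectors byte.
      static final int FORMAT_HAS_VECTORS = -11;

      static void write(DataOutput out, byte hasVectors) throws IOException {
        out.writeByte(hasVectors);   // unconditional, right after diagnostics
      }

      static byte read(DataInput in, int format) throws IOException {
        // Older formats never wrote the byte: report "must check filesystem".
        return format <= FORMAT_HAS_VECTORS ? in.readByte() : (byte) 2;
      }
    }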
@@ -75,6 +75,7 @@ final class SegmentMerger {
   private final CodecProvider codecs;
   private Codec codec;
   private SegmentWriteState segmentWriteState;
+  private boolean hasVectors;

   private PayloadProcessorProvider payloadProcessorProvider;

@@ -100,6 +101,10 @@ final class SegmentMerger {
     return fieldInfos.hasProx();
   }

+  boolean hasVectors() {
+    return hasVectors;
+  }
+
   /**
    * Add an IndexReader to the collection of readers that are to be merged
    * @param reader
@@ -427,7 +432,7 @@ final class SegmentMerger {
   private final void mergeVectors() throws IOException {
     TermVectorsWriter termVectorsWriter =
       new TermVectorsWriter(directory, segment, fieldInfos);
-
+    hasVectors = true;
     try {
       int idx = 0;
       for (final IndexReader reader : readers) {

@@ -243,7 +243,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
         throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
       }

-      if (fieldInfos.hasVectors()) { // open term vector files only as needed
+      if (si.getHasVectors()) { // open term vector files only as needed
         termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
       }
     }

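The SegmentReader switch from FieldInfos.hasVectors() to si.getHasVectors() is an intent-versus-outcome distinction: FieldInfos records that some field asked for vectors, while SegmentInfo records whether vector files were actually written (they may not be if every such document hit a non-aborting exception). A sketch of the open-time decision (hypothetical shape, not the real constructor):

    import java.io.IOException;

    class OpenVectorsSketch {
      interface SegInfo { boolean getHasVectors() throws IOException; }
      static class TermVectorsReaderStub { /* stands in for TermVectorsReader */ }

      static TermVectorsReaderStub openIfPresent(SegInfo si) throws IOException {
        // Open vector files only when the segment actually wrote them.
        return si.getHasVectors() ? new TermVectorsReaderStub() : null;
      }
    }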
@@ -34,6 +34,7 @@ public class SegmentWriteState {
   public final String docStoreSegmentName;
   public final int numDocs;
   public int numDocsInStore;
+  public boolean hasVectors;
   public final Collection<String> flushedFiles;

   final SegmentCodecs segmentCodecs;

@@ -77,7 +77,6 @@ class TermVectorsReader implements Cloneable {

     try {
       String idxName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
-      if (d.fileExists(idxName)) {
       tvx = d.openInput(idxName, readBufferSize);
       format = checkValidFormat(tvx, idxName);
       String fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
@@ -103,13 +102,6 @@ class TermVectorsReader implements Cloneable {
         // docs
         assert numTotalDocs >= size + docStoreOffset: "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset;
       }
-      } else {
-        // If all documents flushed in a segment had hit
-        // non-aborting exceptions, it's possible that
-        // FieldInfos.hasVectors returns true yet the term
-        // vector files don't exist.
-        format = 0;
-      }

       this.fieldInfos = fieldInfos;
       success = true;

@@ -37,6 +37,7 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
   IndexOutput tvd;
   IndexOutput tvf;
   int lastDocID;
+  boolean hasVectors;

   public TermVectorsTermsWriter(DocumentsWriter docWriter) {
     this.docWriter = docWriter;
@@ -57,6 +58,7 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
     // because, although FieldInfos.hasVectors() will return
     // true, the TermVectorsReader gracefully handles
     // non-existence of the term vectors files.
+    state.hasVectors = hasVectors;

     if (tvx != null) {

@@ -108,6 +110,8 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
       docWriter.removeOpenFile(docName);

       lastDocID = 0;
+      state.hasVectors = hasVectors;
+      hasVectors = false;
     }
   }

@@ -159,6 +163,7 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
       String idxName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
       String docName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
       String fldName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
+      hasVectors = true;
       tvx = docWriter.directory.createOutput(idxName);
       tvd = docWriter.directory.createOutput(docName);
       tvf = docWriter.directory.createOutput(fldName);
@@ -218,6 +223,7 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {

   @Override
   public void abort() {
+    hasVectors = false;
     if (tvx != null) {
       try {
         tvx.close();

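The writer-side flag has a simple lifecycle across the TermVectorsTermsWriter hunks above: set when the .tvx/.tvd/.tvf outputs are first created, copied into the SegmentWriteState at flush and at doc-store close, and cleared on abort. Schematically (stub types, not the real classes):

    class VectorFlagLifecycleSketch {
      static class WriteState { boolean hasVectors; }

      private boolean hasVectors;

      void initTermVectorsWriter() { hasVectors = true; }   // outputs created
      void flush(WriteState state) { state.hasVectors = hasVectors; }
      void closeDocStore(WriteState state) {
        state.hasVectors = hasVectors;
        hasVectors = false;                                 // reset for the next doc store
      }
      void abort() { hasVectors = false; }                  // files are discarded
    }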
@@ -39,9 +39,12 @@ public class DefaultSegmentInfosWriter extends SegmentInfosWriter {
    * in the new flex format */
   public static final int FORMAT_4_0 = -10;

+  /** Each segment records whether it has term vectors */
+  public static final int FORMAT_HAS_VECTORS = -11;
+
   /** This must always point to the most recent file format.
    * whenever you add a new format, make it 1 smaller (negative version logic)! */
-  public static final int FORMAT_CURRENT = FORMAT_4_0;
+  public static final int FORMAT_CURRENT = FORMAT_HAS_VECTORS;

   /** This must always point to the first supported file format. */
   public static final int FORMAT_MINIMUM = FORMAT_DIAGNOSTICS;

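Format constants in the segments file grow downward (each new format is one smaller), so "written at or after feature X" is a <= comparison, as in SegmentInfo's read path above. Illustrative only:

    class FormatCheckSketch {
      static final int FORMAT_4_0 = -10;
      static final int FORMAT_HAS_VECTORS = -11;

      static boolean atLeast(int format, int featureFormat) {
        // more negative = newer: -11 <= -11 is true, -10 <= -11 is false
        return format <= featureFormat;
      }
    }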
@@ -39,7 +39,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 public class TestAddIndexes extends LuceneTestCase {

@@ -642,10 +641,12 @@ public class TestAddIndexes extends LuceneTestCase {
     addDoc(writer);
     writer.close();

-    dir2 = new MockDirectoryWrapper(random, new RAMDirectory());
+    dir2 = newDirectory();
     writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer2.setInfoStream(VERBOSE ? System.out : null);
     writer2.commit();

     readers = new IndexReader[NUM_COPY];
     for(int i=0;i<NUM_COPY;i++)
       readers[i] = IndexReader.open(dir, true);
@@ -919,9 +920,12 @@ public class TestAddIndexes extends LuceneTestCase {
     CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
     c.launchThreads(-1);

-    Thread.sleep(500);
+    Thread.sleep(_TestUtil.nextInt(random, 100, 500));

     // Close w/o first stopping/joining the threads
+    if (VERBOSE) {
+      System.out.println("TEST: now force rollback");
+    }
     c.didClose = true;
     c.writer2.rollback();

@@ -270,7 +270,7 @@ public class TestCodecs extends LuceneTestCase {

     final Directory dir = newDirectory();
     this.write(fieldInfos, dir, fields);
-    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()));
+    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()), fieldInfos.hasVectors());
     si.setHasProx(false);

     final FieldsProducer reader = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 64, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
@@ -318,7 +318,7 @@ public class TestCodecs extends LuceneTestCase {
     final Directory dir = newDirectory();

     this.write(fieldInfos, dir, fields);
-    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()));
+    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()), fieldInfos.hasVectors());

     final FieldsProducer terms = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 1024, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));

@@ -201,7 +201,8 @@ public class TestDoc extends LuceneTestCase {
       r2.close();

       final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
-                                               useCompoundFile, -1, null, false, merger.hasProx(), merger.getSegmentCodecs());
+                                               useCompoundFile, -1, null, false, merger.hasProx(), merger.getSegmentCodecs(),
+                                               merger.hasVectors());

       if (useCompoundFile) {
         Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);

@@ -881,10 +881,14 @@ public class TestIndexWriter extends LuceneTestCase {
   public void testEmptyDocAfterFlushingRealDoc() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     writer.addDocument(doc);
     writer.commit();
+    if (VERBOSE) {
+      System.out.println("\nTEST: now add empty doc");
+    }
     writer.addDocument(new Document());
     writer.close();
     _TestUtil.checkIndex(dir);
@@ -1027,7 +1031,11 @@ public class TestIndexWriter extends LuceneTestCase {
     Directory dir = newDirectory();
     int delID = 0;
     for(int i=0;i<20;i++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + i);
+      }
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+      writer.setInfoStream(VERBOSE ? System.out : null);
       LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
       lmp.setMergeFactor(2);
       lmp.setUseCompoundFile(false);

@@ -51,7 +51,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
         System.out.println("TEST: pass=" + pass);
       }
       boolean doAbort = pass == 1;
-      long diskFree = 200;
+      long diskFree = _TestUtil.nextInt(random, 100, 300);
       while(true) {
         if (VERBOSE) {
           System.out.println("TEST: cycle: diskFree=" + diskFree);
@@ -120,7 +120,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
           dir.close();
           // Now try again w/ more space:

-          diskFree += 500;
+          diskFree += _TestUtil.nextInt(random, 400, 600);
         } else {
           //_TestUtil.syncConcurrentMerges(writer);
           dir.setMaxSizeInBytes(0);

@@ -80,7 +80,8 @@ public class TestSegmentMerger extends LuceneTestCase {
     assertTrue(docsMerged == 2);
     //Should be able to open a new SegmentReader against the new directory
     SegmentReader mergedReader = SegmentReader.get(false, mergedDir, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, -1,
-                                                                                     null, false, merger.hasProx(), merger.getSegmentCodecs()), BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+                                                                                     null, false, merger.hasProx(), merger.getSegmentCodecs(), merger.hasVectors()),
+                                                   BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);

     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);

@@ -106,6 +106,7 @@ public class MockIndexOutputWrapper extends IndexOutput {
       message += ")";
       if (LuceneTestCase.VERBOSE) {
         System.out.println(Thread.currentThread().getName() + ": MDW: now throw fake disk full");
+        new Throwable().printStackTrace(System.out);
       }
       throw new IOException(message);
     } else {