diff --git a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java index c7bb35bd772..5f6c84de2a6 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java @@ -71,6 +71,7 @@ public abstract class Codec implements NamedSPILoader.NamedSPI { public abstract FieldInfosFormat fieldInfosFormat(); /** Encodes/decodes segments file */ + // nocommit rename public abstract SegmentInfoFormat segmentInfosFormat(); /** Encodes/decodes document normalization values */ diff --git a/lucene/core/src/java/org/apache/lucene/codecs/LiveDocsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/LiveDocsFormat.java index a016feb921f..cc17adac749 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/LiveDocsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/LiveDocsFormat.java @@ -18,9 +18,10 @@ package org.apache.lucene.codecs; */ import java.io.IOException; +import java.util.Collection; import java.util.Set; -import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfoPerCommit; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Bits; @@ -36,14 +37,14 @@ public abstract class LiveDocsFormat { public abstract MutableBits newLiveDocs(Bits existing) throws IOException; /** Read live docs bits. */ - public abstract Bits readLiveDocs(Directory dir, SegmentInfo info, IOContext context) throws IOException; + public abstract Bits readLiveDocs(Directory dir, SegmentInfoPerCommit info, IOContext context) throws IOException; /** Persist live docs bits. Use {@link - * SegmentInfo#getNextDelGen} to determine the + * SegmentInfoPerCommit#getNextDelGen} to determine the * generation of the deletes file you should write to. */ - public abstract void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfo info, int newDelCount, IOContext context) throws IOException; + public abstract void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfoPerCommit info, int newDelCount, IOContext context) throws IOException; /** Records all files (exact file name or a Pattern regex) - * in use by this {@link SegmentInfo} into the files argument. */ - public abstract void files(SegmentInfo info, Set files) throws IOException; + * in use by this {@link SegmentInfoPerCommit} into the files argument. 
*/ + public abstract void files(SegmentInfoPerCommit info, Collection files) throws IOException; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xCodec.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xCodec.java index f3e2864bd45..a29c83ee42e 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xCodec.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xCodec.java @@ -35,6 +35,7 @@ import org.apache.lucene.codecs.lucene40.Lucene40LiveDocsFormat; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.PerDocWriteState; import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfoPerCommit; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; @@ -68,7 +69,7 @@ public class Lucene3xCodec extends Codec { // TODO: this should really be a different impl private final LiveDocsFormat liveDocsFormat = new Lucene40LiveDocsFormat() { @Override - public void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfo info, int newDelCount, IOContext context) throws IOException { + public void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfoPerCommit info, int newDelCount, IOContext context) throws IOException { throw new UnsupportedOperationException("this codec can only be used for reading"); } }; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xNormsProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xNormsProducer.java index 133c04b1f80..4e57afa6554 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xNormsProducer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xNormsProducer.java @@ -147,7 +147,7 @@ class Lucene3xNormsProducer extends PerDocProducer { return IndexFileNames.fileNameFromGeneration(segmentName, SEPARATE_NORMS_EXTENSION + number, normGen.get(number)); } else { // single file for all norms - return IndexFileNames.fileNameFromGeneration(segmentName, NORMS_EXTENSION, SegmentInfo.WITHOUT_GEN); + return IndexFileNames.segmentFileName(segmentName, "", NORMS_EXTENSION); } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xSegmentInfoReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xSegmentInfoReader.java index e1ff7d42e70..a0ef53a99b4 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xSegmentInfoReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xSegmentInfoReader.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfoPerCommit; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.CompoundFileDirectory; @@ -50,7 +51,9 @@ public class Lucene3xSegmentInfoReader extends SegmentInfoReader { infos.counter = input.readInt(); // read counter Lucene3xSegmentInfoReader reader = new Lucene3xSegmentInfoReader(); for (int i = input.readInt(); i > 0; i--) { // read segmentInfos - SegmentInfo si = reader.readSegmentInfo(null, directory, format, input); + SegmentInfoPerCommit siPerCommit = reader.readSegmentInfo(null, directory, format, input); + SegmentInfo si = siPerCommit.info; + if 
(si.getVersion() == null) { // Could be a 3.0 - try to open the doc stores - if it fails, it's a // 2.x segment, and an IndexFormatTooOldException will be thrown, @@ -84,7 +87,7 @@ public class Lucene3xSegmentInfoReader extends SegmentInfoReader { // appropriate exception. throw new IndexFormatTooOldException("segment " + si.name + " in resource " + input, si.getVersion()); } - infos.add(si); + infos.add(siPerCommit); } infos.userData = input.readStringStringMap(); @@ -105,7 +108,7 @@ public class Lucene3xSegmentInfoReader extends SegmentInfoReader { IndexInput input = directory.openInput(fileName, context); try { - SegmentInfo si = readSegmentInfo(segmentName, directory, format, input); + SegmentInfo si = readSegmentInfo(segmentName, directory, format, input).info; success = true; return si; } finally { @@ -123,7 +126,7 @@ public class Lucene3xSegmentInfoReader extends SegmentInfoReader { } } - private SegmentInfo readSegmentInfo(String segmentName, Directory dir, int format, IndexInput input) throws IOException { + private SegmentInfoPerCommit readSegmentInfo(String segmentName, Directory dir, int format, IndexInput input) throws IOException { // check that it is a format we can understand if (format > Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS) { throw new IndexFormatTooOldException(input, format, @@ -243,9 +246,10 @@ public class Lucene3xSegmentInfoReader extends SegmentInfoReader { SegmentInfo info = new SegmentInfo(dir, version, segmentName, docCount, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile, - delCount, null, diagnostics); - info.setDelGen(delGen); + null, diagnostics); info.setFiles(files); - return info; + + SegmentInfoPerCommit infoPerCommit = new SegmentInfoPerCommit(info, delCount, delGen); + return infoPerCommit; } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40LiveDocsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40LiveDocsFormat.java index df81d27e0fe..2018b6a4550 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40LiveDocsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40LiveDocsFormat.java @@ -18,11 +18,13 @@ package org.apache.lucene.codecs.lucene40; */ import java.io.IOException; +import java.util.Collection; import java.util.Set; import org.apache.lucene.codecs.LiveDocsFormat; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfoPerCommit; import org.apache.lucene.store.DataOutput; // javadocs import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; @@ -81,28 +83,28 @@ public class Lucene40LiveDocsFormat extends LiveDocsFormat { } @Override - public Bits readLiveDocs(Directory dir, SegmentInfo info, IOContext context) throws IOException { - String filename = IndexFileNames.fileNameFromGeneration(info.name, DELETES_EXTENSION, info.getDelGen()); + public Bits readLiveDocs(Directory dir, SegmentInfoPerCommit info, IOContext context) throws IOException { + String filename = IndexFileNames.fileNameFromGeneration(info.info.name, DELETES_EXTENSION, info.getDelGen()); final BitVector liveDocs = new BitVector(dir, filename, context); - assert liveDocs.count() == info.docCount - info.getDelCount(): - "liveDocs.count()=" + liveDocs.count() + " info.docCount=" + info.docCount + " info.getDelCount()=" + info.getDelCount(); - assert liveDocs.length() == info.docCount; + assert liveDocs.count() == info.info.docCount - 
info.getDelCount(): + "liveDocs.count()=" + liveDocs.count() + " info.docCount=" + info.info.docCount + " info.getDelCount()=" + info.getDelCount(); + assert liveDocs.length() == info.info.docCount; return liveDocs; } @Override - public void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfo info, int newDelCount, IOContext context) throws IOException { - String filename = IndexFileNames.fileNameFromGeneration(info.name, DELETES_EXTENSION, info.getNextDelGen()); + public void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfoPerCommit info, int newDelCount, IOContext context) throws IOException { + String filename = IndexFileNames.fileNameFromGeneration(info.info.name, DELETES_EXTENSION, info.getNextDelGen()); final BitVector liveDocs = (BitVector) bits; - assert liveDocs.count() == info.docCount - info.getDelCount() - newDelCount; - assert liveDocs.length() == info.docCount; + assert liveDocs.count() == info.info.docCount - info.getDelCount() - newDelCount; + assert liveDocs.length() == info.info.docCount; liveDocs.write(dir, filename, context); } @Override - public void files(SegmentInfo info, Set files) throws IOException { + public void files(SegmentInfoPerCommit info, Collection files) throws IOException { if (info.hasDeletions()) { - files.add(IndexFileNames.fileNameFromGeneration(info.name, DELETES_EXTENSION, info.getDelGen())); + files.add(IndexFileNames.fileNameFromGeneration(info.info.name, DELETES_EXTENSION, info.getDelGen())); } } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoReader.java index 00f066731cc..7e51bd22e3f 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoReader.java @@ -55,7 +55,7 @@ public class Lucene40SegmentInfoReader extends SegmentInfoReader { final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile, - 0, null, diagnostics); + null, diagnostics); si.setFiles(files); success = true; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoWriter.java index 6ec1b145684..e8b4857224b 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40SegmentInfoWriter.java @@ -39,7 +39,6 @@ public class Lucene40SegmentInfoWriter extends SegmentInfoWriter { /** Save a single segment's info. 
*/ @Override public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException { - assert si.getDelCount() <= si.docCount: "delCount=" + si.getDelCount() + " docCount=" + si.docCount + " segment=" + si.name; final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene40SegmentInfoFormat.SI_EXTENSION); assert si.getFiles() != null; si.getFiles().add(fileName); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextLiveDocsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextLiveDocsFormat.java index 5eef0ee646a..f68b46a8810 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextLiveDocsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextLiveDocsFormat.java @@ -19,11 +19,13 @@ package org.apache.lucene.codecs.simpletext; import java.io.IOException; import java.util.BitSet; +import java.util.Collection; import java.util.Set; import org.apache.lucene.codecs.LiveDocsFormat; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentInfoPerCommit; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -63,12 +65,12 @@ public class SimpleTextLiveDocsFormat extends LiveDocsFormat { } @Override - public Bits readLiveDocs(Directory dir, SegmentInfo info, IOContext context) throws IOException { + public Bits readLiveDocs(Directory dir, SegmentInfoPerCommit info, IOContext context) throws IOException { assert info.hasDeletions(); BytesRef scratch = new BytesRef(); CharsRef scratchUTF16 = new CharsRef(); - String fileName = IndexFileNames.fileNameFromGeneration(info.name, LIVEDOCS_EXTENSION, info.getDelGen()); + String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getDelGen()); IndexInput in = null; boolean success = false; try { @@ -105,12 +107,12 @@ public class SimpleTextLiveDocsFormat extends LiveDocsFormat { } @Override - public void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfo info, int newDelCount, IOContext context) throws IOException { + public void writeLiveDocs(MutableBits bits, Directory dir, SegmentInfoPerCommit info, int newDelCount, IOContext context) throws IOException { BitSet set = ((SimpleTextBits) bits).bits; int size = bits.length(); BytesRef scratch = new BytesRef(); - String fileName = IndexFileNames.fileNameFromGeneration(info.name, LIVEDOCS_EXTENSION, info.getNextDelGen()); + String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getNextDelGen()); IndexOutput out = null; boolean success = false; try { @@ -138,9 +140,9 @@ public class SimpleTextLiveDocsFormat extends LiveDocsFormat { } @Override - public void files(SegmentInfo info, Set files) throws IOException { + public void files(SegmentInfoPerCommit info, Collection files) throws IOException { if (info.hasDeletions()) { - files.add(IndexFileNames.fileNameFromGeneration(info.name, LIVEDOCS_EXTENSION, info.getDelGen())); + files.add(IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getDelGen())); } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosFormat.java index 08d88f9ac8d..9825768995e 100644 --- 
a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosFormat.java @@ -25,6 +25,8 @@ import org.apache.lucene.codecs.SegmentInfoWriter; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfo; +// nocommit rename (remove s) + /** * plain text segments file format. *

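The class this whole patch pivots on, SegmentInfoPerCommit, never appears in full in these hunks, so a minimal sketch of its shape is given below, inferred from the call sites in this diff (the constructor, the info field, getDelCount/setDelCount, getDelGen/getNextDelGen, advanceDelGen, hasDeletions). The method bodies are assumptions, not the committed code:

    // Sketch only: a write-once SegmentInfo plus the mutable per-commit state
    // (delete count, deletes-file generation) that this patch moves out of it.
    class SegmentInfoPerCommit {
      final SegmentInfo info;        // immutable descriptor, shareable across commits
      private int delCount;          // deletes recorded against this commit point
      private long delGen;           // generation of the current deletes file; -1 = none
      private long nextWriteDelGen;  // assumed bookkeeping behind getNextDelGen()

      SegmentInfoPerCommit(SegmentInfo info, int delCount, long delGen) {
        this.info = info;
        this.delCount = delCount;
        this.delGen = delGen;
        this.nextWriteDelGen = delGen == -1 ? 1 : delGen + 1;
      }

      boolean hasDeletions() { return delGen != -1; }
      int getDelCount() { return delCount; }
      void setDelCount(int delCount) { this.delCount = delCount; }
      long getDelGen() { return delGen; }
      long getNextDelGen() { return nextWriteDelGen; }
      void advanceDelGen() { delGen = nextWriteDelGen++; }
    }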
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosReader.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosReader.java index f81ee38a41d..817df05c2f4 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosReader.java @@ -40,6 +40,8 @@ import org.apache.lucene.util.StringHelper; import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfosWriter.*; +// nocommit rename (remove s) + /** * reads plaintext segments files *

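The hunk below drops the delete count from yet another SegmentInfo constructor call; the motivation, visible throughout this patch, is that deletes belong to a commit point rather than to the write-once segment, so one SegmentInfo can back several commit points. A hypothetical illustration (the variable names are invented, si is some already-flushed SegmentInfo):

    // one write-once SegmentInfo, two commit points seeing different deletes:
    SegmentInfoPerCommit atFlush   = new SegmentInfoPerCommit(si, 0, -1L);  // no deletes yet
    SegmentInfoPerCommit laterView = new SegmentInfoPerCommit(si, 12, 1L);  // 12 deletes, del gen 1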
@@ -97,7 +99,7 @@ public class SimpleTextSegmentInfosReader extends SegmentInfoReader { SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount, -1, segmentName, false, null, isCompoundFile, - 0, null, diagnostics); + null, diagnostics); info.setFiles(files); success = true; return info; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosWriter.java index 4d476f953b5..a7b5acff7f7 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfosWriter.java @@ -35,6 +35,8 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; +// nocommit rename (remove s) + /** * writes plaintext segments files *

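The hunk below removes the same delCount <= docCount assertion that was just dropped from Lucene40SegmentInfoWriter: the .si writers no longer see delete counts at all. The invariant itself survives at the per-commit level; this patch asserts it wherever deletes are applied, in the shape shown here (condensed from the BufferedDeletesStream and IndexWriter hunks further down):

    assert fullDelCount <= rld.info.info.docCount;
    assert delCount <= info.info.docCount : "delCount=" + delCount + " info.docCount=" + info.info.docCount;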
@@ -54,7 +56,6 @@ public class SimpleTextSegmentInfosWriter extends SegmentInfoWriter { @Override public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException { - assert si.getDelCount() <= si.docCount: "delCount=" + si.getDelCount() + " docCount=" + si.docCount + " segment=" + si.name; String segFileName = IndexFileNames.segmentFileName(si.name, "", SimpleTextSegmentInfosFormat.SI_EXTENSION); si.getFiles().add(segFileName); diff --git a/lucene/core/src/java/org/apache/lucene/index/BufferedDeletesStream.java b/lucene/core/src/java/org/apache/lucene/index/BufferedDeletesStream.java index 78f80f11fa7..8ac02f9f7f3 100644 --- a/lucene/core/src/java/org/apache/lucene/index/BufferedDeletesStream.java +++ b/lucene/core/src/java/org/apache/lucene/index/BufferedDeletesStream.java @@ -121,9 +121,9 @@ class BufferedDeletesStream { public final long gen; // If non-null, contains segments that are 100% deleted - public final List<SegmentInfo> allDeleted; + public final List<SegmentInfoPerCommit> allDeleted; - ApplyDeletesResult(boolean anyDeletes, long gen, List<SegmentInfo> allDeleted) { + ApplyDeletesResult(boolean anyDeletes, long gen, List<SegmentInfoPerCommit> allDeleted) { this.anyDeletes = anyDeletes; this.gen = gen; this.allDeleted = allDeleted; @@ -131,9 +131,9 @@ } // Sorts SegmentInfos from smallest to biggest bufferedDelGen: - private static final Comparator<SegmentInfo> sortSegInfoByDelGen = new Comparator<SegmentInfo>() { + private static final Comparator<SegmentInfoPerCommit> sortSegInfoByDelGen = new Comparator<SegmentInfoPerCommit>() { @Override - public int compare(SegmentInfo si1, SegmentInfo si2) { + public int compare(SegmentInfoPerCommit si1, SegmentInfoPerCommit si2) { final long cmp = si1.getBufferedDeletesGen() - si2.getBufferedDeletesGen(); if (cmp > 0) { return 1; @@ -148,7 +148,7 @@ /** Resolves the buffered deleted Term/Query/docIDs, into * actual deleted docIDs in the liveDocs MutableBits for * each SegmentReader. */ - public synchronized ApplyDeletesResult applyDeletes(IndexWriter.ReaderPool readerPool, List<SegmentInfo> infos) throws IOException { + public synchronized ApplyDeletesResult applyDeletes(IndexWriter.ReaderPool readerPool, List<SegmentInfoPerCommit> infos) throws IOException { final long t0 = System.currentTimeMillis(); if (infos.size() == 0) { @@ -168,7 +168,7 @@ infoStream.message("BD", "applyDeletes: infos=" + infos + " packetCount=" + deletes.size()); } - List<SegmentInfo> infos2 = new ArrayList<SegmentInfo>(); + List<SegmentInfoPerCommit> infos2 = new ArrayList<SegmentInfoPerCommit>(); infos2.addAll(infos); Collections.sort(infos2, sortSegInfoByDelGen); @@ -178,13 +178,13 @@ int infosIDX = infos2.size()-1; int delIDX = deletes.size()-1; - List<SegmentInfo> allDeleted = null; + List<SegmentInfoPerCommit> allDeleted = null; while (infosIDX >= 0) { //System.out.println("BD: cycle delIDX=" + delIDX + " infoIDX=" + infosIDX); final FrozenBufferedDeletes packet = delIDX >= 0 ?
deletes.get(delIDX) : null; - final SegmentInfo info = infos2.get(infosIDX); + final SegmentInfoPerCommit info = infos2.get(infosIDX); final long segGen = info.getBufferedDeletesGen(); if (packet != null && segGen < packet.delGen()) { @@ -225,8 +225,8 @@ // already did that on flush: delCount += applyQueryDeletes(packet.queriesIterable(), rld, reader); final int fullDelCount = rld.info.getDelCount() + rld.getPendingDeleteCount(); - assert fullDelCount <= rld.info.docCount; - segAllDeletes = fullDelCount == rld.info.docCount; + assert fullDelCount <= rld.info.info.docCount; + segAllDeletes = fullDelCount == rld.info.info.docCount; } finally { rld.release(reader); readerPool.release(rld); @@ -235,7 +235,7 @@ if (segAllDeletes) { if (allDeleted == null) { - allDeleted = new ArrayList<SegmentInfo>(); + allDeleted = new ArrayList<SegmentInfoPerCommit>(); } allDeleted.add(info); } @@ -271,8 +271,8 @@ delCount += applyTermDeletes(coalescedDeletes.termsIterable(), rld, reader); delCount += applyQueryDeletes(coalescedDeletes.queriesIterable(), rld, reader); final int fullDelCount = rld.info.getDelCount() + rld.getPendingDeleteCount(); - assert fullDelCount <= rld.info.docCount; - segAllDeletes = fullDelCount == rld.info.docCount; + assert fullDelCount <= rld.info.info.docCount; + segAllDeletes = fullDelCount == rld.info.info.docCount; } finally { rld.release(reader); readerPool.release(rld); @@ -281,7 +281,7 @@ if (segAllDeletes) { if (allDeleted == null) { - allDeleted = new ArrayList<SegmentInfo>(); + allDeleted = new ArrayList<SegmentInfoPerCommit>(); } allDeleted.add(info); } @@ -316,7 +316,7 @@ public synchronized void prune(SegmentInfos segmentInfos) { assert checkDeleteStats(); long minGen = Long.MAX_VALUE; - for(SegmentInfo info : segmentInfos) { + for(SegmentInfoPerCommit info : segmentInfos) { minGen = Math.min(info.getBufferedDeletesGen(), minGen); } diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java index 47bcbc6d0ac..395f50fdcfb 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java +++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java @@ -361,8 +361,8 @@ public class CheckIndex { String oldSegs = null; boolean foundNonNullVersion = false; Comparator<String> versionComparator = StringHelper.getVersionComparator(); - for (SegmentInfo si : sis) { - String version = si.getVersion(); + for (SegmentInfoPerCommit si : sis) { + String version = si.info.getVersion(); if (version == null) { // pre-3.1 segment oldSegs = "pre-3.1"; @@ -455,47 +455,48 @@ result.maxSegmentName = -1; for(int i=0;i<numSegments;i++) { - final SegmentInfo info = sis.info(i); + final SegmentInfoPerCommit info = sis.info(i); - int segmentName = Integer.parseInt(info.name.substring(1), Character.MAX_RADIX); + int segmentName = Integer.parseInt(info.info.name.substring(1), Character.MAX_RADIX); if (segmentName > result.maxSegmentName) { result.maxSegmentName = segmentName; } - if (onlySegments != null && !onlySegments.contains(info.name)) + if (onlySegments != null && !onlySegments.contains(info.info.name)) { continue; + } Status.SegmentInfoStatus segInfoStat = new Status.SegmentInfoStatus(); result.segmentInfos.add(segInfoStat); - msg(" " + (1+i) + " of " + numSegments + ": name=" + info.name + " docCount=" + info.docCount); - segInfoStat.name = info.name; - segInfoStat.docCount = info.docCount; + msg(" " + (1+i) + " of " + numSegments + ": name=" + info.info.name + " docCount=" + info.info.docCount); + segInfoStat.name = info.info.name; + segInfoStat.docCount = info.info.docCount; - int toLoseDocCount = info.docCount; + int toLoseDocCount = info.info.docCount; SegmentReader reader = null; try { -
final Codec codec = info.getCodec(); + final Codec codec = info.info.getCodec(); msg(" codec=" + codec); segInfoStat.codec = codec; - msg(" compound=" + info.getUseCompoundFile()); - segInfoStat.compound = info.getUseCompoundFile(); - msg(" numFiles=" + info.files().size()); - segInfoStat.numFiles = info.files().size(); + msg(" compound=" + info.info.getUseCompoundFile()); + segInfoStat.compound = info.info.getUseCompoundFile(); + msg(" numFiles=" + info.info.files().size()); + segInfoStat.numFiles = info.info.files().size(); segInfoStat.sizeMB = info.sizeInBytes()/(1024.*1024.); msg(" size (MB)=" + nf.format(segInfoStat.sizeMB)); - Map diagnostics = info.getDiagnostics(); + Map diagnostics = info.info.getDiagnostics(); segInfoStat.diagnostics = diagnostics; if (diagnostics.size() > 0) { msg(" diagnostics = " + diagnostics); } - final int docStoreOffset = info.getDocStoreOffset(); + final int docStoreOffset = info.info.getDocStoreOffset(); if (docStoreOffset != -1) { msg(" docStoreOffset=" + docStoreOffset); segInfoStat.docStoreOffset = docStoreOffset; - msg(" docStoreSegment=" + info.getDocStoreSegment()); - segInfoStat.docStoreSegment = info.getDocStoreSegment(); - msg(" docStoreIsCompoundFile=" + info.getDocStoreIsCompoundFile()); - segInfoStat.docStoreCompoundFile = info.getDocStoreIsCompoundFile(); + msg(" docStoreSegment=" + info.info.getDocStoreSegment()); + segInfoStat.docStoreSegment = info.info.getDocStoreSegment(); + msg(" docStoreIsCompoundFile=" + info.info.getDocStoreIsCompoundFile()); + segInfoStat.docStoreCompoundFile = info.info.getDocStoreIsCompoundFile(); } if (info.hasDeletions()) { @@ -516,14 +517,14 @@ public class CheckIndex { final int numDocs = reader.numDocs(); toLoseDocCount = numDocs; if (reader.hasDeletions()) { - if (reader.numDocs() != info.docCount - info.getDelCount()) { - throw new RuntimeException("delete count mismatch: info=" + (info.docCount - info.getDelCount()) + " vs reader=" + reader.numDocs()); + if (reader.numDocs() != info.info.docCount - info.getDelCount()) { + throw new RuntimeException("delete count mismatch: info=" + (info.info.docCount - info.getDelCount()) + " vs reader=" + reader.numDocs()); } - if ((info.docCount-reader.numDocs()) > reader.maxDoc()) { - throw new RuntimeException("too many deleted docs: maxDoc()=" + reader.maxDoc() + " vs del count=" + (info.docCount-reader.numDocs())); + if ((info.info.docCount-reader.numDocs()) > reader.maxDoc()) { + throw new RuntimeException("too many deleted docs: maxDoc()=" + reader.maxDoc() + " vs del count=" + (info.info.docCount-reader.numDocs())); } - if (info.docCount - numDocs != info.getDelCount()) { - throw new RuntimeException("delete count mismatch: info=" + info.getDelCount() + " vs reader=" + (info.docCount - numDocs)); + if (info.info.docCount - numDocs != info.getDelCount()) { + throw new RuntimeException("delete count mismatch: info=" + info.getDelCount() + " vs reader=" + (info.info.docCount - numDocs)); } Bits liveDocs = reader.getLiveDocs(); if (liveDocs == null) { @@ -540,11 +541,11 @@ public class CheckIndex { } } - segInfoStat.numDeleted = info.docCount - numDocs; + segInfoStat.numDeleted = info.info.docCount - numDocs; msg("OK [" + (segInfoStat.numDeleted) + " deleted docs]"); } else { if (info.getDelCount() != 0) { - throw new RuntimeException("delete count mismatch: info=" + info.getDelCount() + " vs reader=" + (info.docCount - numDocs)); + throw new RuntimeException("delete count mismatch: info=" + info.getDelCount() + " vs reader=" + (info.info.docCount - numDocs)); } Bits 
liveDocs = reader.getLiveDocs(); if (liveDocs != null) { @@ -557,8 +558,9 @@ public class CheckIndex { } msg("OK"); } - if (reader.maxDoc() != info.docCount) - throw new RuntimeException("SegmentReader.maxDoc() " + reader.maxDoc() + " != SegmentInfos.docCount " + info.docCount); + if (reader.maxDoc() != info.info.docCount) { + throw new RuntimeException("SegmentReader.maxDoc() " + reader.maxDoc() + " != SegmentInfos.docCount " + info.info.docCount); + } // Test getFieldInfos() if (infoStream != null) { @@ -1158,7 +1160,7 @@ public class CheckIndex { /** * Test stored fields for a segment. */ - private Status.StoredFieldStatus testStoredFields(SegmentInfo info, SegmentReader reader, NumberFormat format) { + private Status.StoredFieldStatus testStoredFields(SegmentInfoPerCommit info, SegmentReader reader, NumberFormat format) { final Status.StoredFieldStatus status = new Status.StoredFieldStatus(); try { @@ -1168,7 +1170,7 @@ public class CheckIndex { // Scan stored fields for all documents final Bits liveDocs = reader.getLiveDocs(); - for (int j = 0; j < info.docCount; ++j) { + for (int j = 0; j < info.info.docCount; ++j) { // Intentionally pull even deleted documents to // make sure they too are not corrupt: Document doc = reader.document(j); @@ -1282,7 +1284,7 @@ public class CheckIndex { } } - private Status.DocValuesStatus testDocValues(SegmentInfo info, + private Status.DocValuesStatus testDocValues(SegmentInfoPerCommit info, FieldInfos fieldInfos, SegmentReader reader) { final Status.DocValuesStatus status = new Status.DocValuesStatus(); @@ -1317,7 +1319,7 @@ public class CheckIndex { /** * Test term vectors for a segment. */ - private Status.TermVectorStatus testTermVectors(FieldInfos fieldInfos, SegmentInfo info, SegmentReader reader, NumberFormat format) { + private Status.TermVectorStatus testTermVectors(FieldInfos fieldInfos, SegmentInfoPerCommit info, SegmentReader reader, NumberFormat format) { final Status.TermVectorStatus status = new Status.TermVectorStatus(); final Bits onlyDocIsDeleted = new FixedBitSet(1); @@ -1347,7 +1349,7 @@ public class CheckIndex { TermsEnum termsEnum = null; TermsEnum postingsTermsEnum = null; - for (int j = 0; j < info.docCount; ++j) { + for (int j = 0; j < info.info.docCount; ++j) { // Intentionally pull/visit (but don't count in // stats) deleted documents to make sure they too // are not corrupt: diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java index 9d444bbe2d9..3a24a727f50 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java @@ -494,7 +494,7 @@ final class DocumentsWriter { private void publishFlushedSegment(FlushedSegment newSegment, FrozenBufferedDeletes globalPacket) throws IOException { assert newSegment != null; - final SegmentInfo segInfo = indexWriter.prepareFlushedSegment(newSegment); + final SegmentInfoPerCommit segInfo = indexWriter.prepareFlushedSegment(newSegment); final BufferedDeletes deletes = newSegment.segmentDeletes; if (infoStream.isEnabled("DW")) { infoStream.message("DW", Thread.currentThread().getName() + ": publishFlushedSegment seg-private deletes=" + deletes); diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java index 14e8a87fc6b..8187556d7a2 100644 --- 
a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java @@ -116,13 +116,13 @@ class DocumentsWriterPerThread { } static class FlushedSegment { - final SegmentInfo segmentInfo; + final SegmentInfoPerCommit segmentInfo; final FieldInfos fieldInfos; final BufferedDeletes segmentDeletes; final MutableBits liveDocs; final int delCount; - private FlushedSegment(SegmentInfo segmentInfo, FieldInfos fieldInfos, + private FlushedSegment(SegmentInfoPerCommit segmentInfo, FieldInfos fieldInfos, BufferedDeletes segmentDeletes, MutableBits liveDocs, int delCount) { this.segmentInfo = segmentInfo; this.fieldInfos = fieldInfos; @@ -485,7 +485,7 @@ class DocumentsWriterPerThread { consumer.flush(flushState); pendingDeletes.terms.clear(); final SegmentInfo newSegment = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segment, flushState.numDocs, - -1, segment, false, null, false, 0, + -1, segment, false, null, false, flushState.codec, null); newSegment.setFiles(new HashSet(directory.getCreatedFiles())); @@ -523,7 +523,8 @@ class DocumentsWriterPerThread { doAfterFlush(); success = true; - return new FlushedSegment(newSegment, flushState.fieldInfos, segmentDeletes, flushState.liveDocs, flushState.delCountOnFlush); + return new FlushedSegment(new SegmentInfoPerCommit(newSegment, 0, -1L), flushState.fieldInfos, + segmentDeletes, flushState.liveDocs, flushState.delCountOnFlush); } finally { if (!success) { if (segment != null) { diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexFileNames.java b/lucene/core/src/java/org/apache/lucene/index/IndexFileNames.java index b7b5044e3cd..98452d336c9 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexFileNames.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexFileNames.java @@ -80,11 +80,12 @@ public final class IndexFileNames { * @param gen generation */ public static String fileNameFromGeneration(String base, String ext, long gen) { - if (gen == SegmentInfo.NO) { + if (gen == -1) { return null; - } else if (gen == SegmentInfo.WITHOUT_GEN) { + } else if (gen == 0) { return segmentFileName(base, "", ext); } else { + assert gen > 0; // The '6' part in the length is: 1 for '.', 1 for '_' and 4 as estimate // to the gen length as string (hopefully an upper limit so SB won't // expand in the middle. 
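With the SegmentInfo.NO and SegmentInfo.WITHOUT_GEN constants gone, fileNameFromGeneration now keys off bare literals: -1 means the file does not exist, 0 means the plain pre-generation name, and any positive value gets a _gen infix (hence the new assert gen > 0). Illustrative calls, not part of the patch:

    IndexFileNames.fileNameFromGeneration("_3", "del", -1);  // null: no deletes file
    IndexFileNames.fileNameFromGeneration("_3", "del", 0);   // "_3.del"
    IndexFileNames.fileNameFromGeneration("_3", "del", 2);   // "_3_2.del"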
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index 8fc60f52257..32d8a14be32 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -206,7 +206,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { private volatile long changeCount; // increments every time a change is completed private long lastCommitChangeCount; // last changeCount that was committed - private List<SegmentInfo> rollbackSegments; // list of segmentInfo we will fallback to if the commit fails + private List<SegmentInfoPerCommit> rollbackSegments; // list of segmentInfo we will fallback to if the commit fails volatile SegmentInfos pendingCommit; // set when a commit is pending (after prepareCommit() & before commit()) volatile long pendingCommitChangeCount; @@ -220,7 +220,7 @@ final IndexFileDeleter deleter; // used by forceMerge to note those needing merging - private Map<SegmentInfo,Boolean> segmentsToMerge = new HashMap<SegmentInfo,Boolean>(); + private Map<SegmentInfoPerCommit,Boolean> segmentsToMerge = new HashMap<SegmentInfoPerCommit,Boolean>(); private int mergeMaxNumSegments; private Lock writeLock; @@ -230,7 +230,7 @@ // Holds all SegmentInfo instances currently involved in // merges - private HashSet<SegmentInfo> mergingSegments = new HashSet<SegmentInfo>(); + private HashSet<SegmentInfoPerCommit> mergingSegments = new HashSet<SegmentInfoPerCommit>(); private MergePolicy mergePolicy; private final MergeScheduler mergeScheduler; @@ -402,17 +402,17 @@ class ReaderPool { - private final Map<SegmentInfo,ReadersAndLiveDocs> readerMap = new HashMap<SegmentInfo,ReadersAndLiveDocs>(); + private final Map<SegmentInfoPerCommit,ReadersAndLiveDocs> readerMap = new HashMap<SegmentInfoPerCommit,ReadersAndLiveDocs>(); // used only by asserts - public synchronized boolean infoIsLive(SegmentInfo info) { + public synchronized boolean infoIsLive(SegmentInfoPerCommit info) { int idx = segmentInfos.indexOf(info); assert idx != -1: "info=" + info + " isn't live"; assert segmentInfos.info(idx) == info: "info=" + info + " doesn't match live info in segmentInfos"; return true; } - public synchronized void drop(SegmentInfo info) throws IOException { + public synchronized void drop(SegmentInfoPerCommit info) throws IOException { final ReadersAndLiveDocs rld = readerMap.get(info); if (rld != null) { assert info == rld.info; @@ -448,7 +448,7 @@ /** Remove all our references to readers, and commits * any pending changes. */ synchronized void dropAll(boolean doSave) throws IOException { - final Iterator<Map.Entry<SegmentInfo,ReadersAndLiveDocs>> it = readerMap.entrySet().iterator(); + final Iterator<Map.Entry<SegmentInfoPerCommit,ReadersAndLiveDocs>> it = readerMap.entrySet().iterator(); while(it.hasNext()) { final ReadersAndLiveDocs rld = it.next().getValue(); if (doSave && rld.writeLiveDocs(directory)) { @@ -481,7 +481,7 @@ * @throws IOException */ public synchronized void commit(SegmentInfos infos) throws IOException { - for (SegmentInfo info : infos) { + for (SegmentInfoPerCommit info : infos) { final ReadersAndLiveDocs rld = readerMap.get(info); if (rld != null) { assert rld.info == info; @@ -502,9 +502,9 @@ * {@link #release(ReadersAndLiveDocs)}. 
* @throws IOException */ - public synchronized ReadersAndLiveDocs get(SegmentInfo info, boolean create) { + public synchronized ReadersAndLiveDocs get(SegmentInfoPerCommit info, boolean create) { - assert info.dir == directory: "info.dir=" + info.dir + " vs " + directory; + assert info.info.dir == directory: "info.dir=" + info.info.dir + " vs " + directory; ReadersAndLiveDocs rld = readerMap.get(info); if (rld == null) { @@ -532,7 +532,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { * If the reader isn't being pooled, the segmentInfo's * delCount is returned. */ - public int numDeletedDocs(SegmentInfo info) throws IOException { + public int numDeletedDocs(SegmentInfoPerCommit info) throws IOException { ensureOpen(false); int delCount = info.getDelCount(); @@ -660,7 +660,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } } - rollbackSegments = segmentInfos.createBackupSegmentInfos(true); + rollbackSegments = segmentInfos.createBackupSegmentInfos(); // start with previous field numbers, but new FieldInfos globalFieldNumberMap = getFieldNumberMap(); @@ -705,22 +705,22 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } } - private FieldInfos getFieldInfos(SegmentInfo info) throws IOException { + private FieldInfos getFieldInfos(SegmentInfoPerCommit info) throws IOException { Directory cfsDir = null; try { - if (info.getUseCompoundFile()) { + if (info.info.getUseCompoundFile()) { cfsDir = new CompoundFileDirectory(directory, - IndexFileNames.segmentFileName(info.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), + IndexFileNames.segmentFileName(info.info.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), IOContext.READONCE, false); } else { cfsDir = directory; } - return info.getCodec().fieldInfosFormat().getFieldInfosReader().read(cfsDir, - info.name, - IOContext.READONCE); + return info.info.getCodec().fieldInfosFormat().getFieldInfosReader().read(cfsDir, + info.info.name, + IOContext.READONCE); } finally { - if (info.getUseCompoundFile() && cfsDir != null) { + if (info.info.getUseCompoundFile() && cfsDir != null) { cfsDir.close(); } } @@ -733,9 +733,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { private FieldNumberBiMap getFieldNumberMap() throws IOException { final FieldNumberBiMap map = new FieldNumberBiMap(); - SegmentInfo biggest = null; - for(SegmentInfo info : segmentInfos) { - if (biggest == null || (info.docCount-info.getDelCount()) > (biggest.docCount-biggest.getDelCount())) { + SegmentInfoPerCommit biggest = null; + for(SegmentInfoPerCommit info : segmentInfos) { + if (biggest == null || (info.info.docCount-info.getDelCount()) > (biggest.info.docCount-biggest.getDelCount())) { biggest = info; } } @@ -998,8 +998,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { else count = 0; - for (final SegmentInfo info : segmentInfos) { - count += info.docCount - numDeletedDocs(info); + for (final SegmentInfoPerCommit info : segmentInfos) { + count += info.info.docCount - numDeletedDocs(info); } return count; } @@ -1012,7 +1012,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { if (docWriter.anyDeletions()) { return true; } - for (final SegmentInfo info : segmentInfos) { + for (final SegmentInfoPerCommit info : segmentInfos) { if (info.hasDeletions()) { return true; } @@ -1367,7 +1367,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // for test purpose final synchronized int getDocCount(int i) { if (i >= 0 && i < segmentInfos.size()) { - return 
segmentInfos.info(i).docCount; + return segmentInfos.info(i).info.docCount; } else { return -1; } @@ -1496,7 +1496,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { synchronized(this) { resetMergeExceptions(); segmentsToMerge.clear(); - for(SegmentInfo info : segmentInfos) { + for(SegmentInfoPerCommit info : segmentInfos) { segmentsToMerge.put(info, Boolean.TRUE); } mergeMaxNumSegments = maxNumSegments; @@ -1633,8 +1633,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { running = false; for(int i=0;iDo not alter the returned collection! */ - public synchronized Collection getMergingSegments() { + public synchronized Collection getMergingSegments() { return mergingSegments; } @@ -1762,9 +1763,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { * @lucene.experimental */ public synchronized MergePolicy.OneMerge getNextMerge() { - if (pendingMerges.size() == 0) + if (pendingMerges.size() == 0) { return null; - else { + } else { // Advance the merge from pending to running MergePolicy.OneMerge merge = pendingMerges.removeFirst(); runningMerges.add(merge); @@ -2017,22 +2018,22 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { * * @see #publishFlushedSegment(SegmentInfo, FrozenBufferedDeletes, FrozenBufferedDeletes) */ - SegmentInfo prepareFlushedSegment(FlushedSegment flushedSegment) throws IOException { + SegmentInfoPerCommit prepareFlushedSegment(FlushedSegment flushedSegment) throws IOException { assert flushedSegment != null; - SegmentInfo newSegment = flushedSegment.segmentInfo; + SegmentInfoPerCommit newSegment = flushedSegment.segmentInfo; - setDiagnostics(newSegment, "flush"); + setDiagnostics(newSegment.info, "flush"); - IOContext context = new IOContext(new FlushInfo(newSegment.docCount, newSegment.sizeInBytes())); + IOContext context = new IOContext(new FlushInfo(newSegment.info.docCount, newSegment.info.sizeInBytes())); boolean success = false; try { if (useCompoundFile(newSegment)) { // Now build compound file - Collection oldFiles = createCompoundFile(infoStream, directory, MergeState.CheckAbort.NONE, newSegment, context); - newSegment.setUseCompoundFile(true); + Collection oldFiles = createCompoundFile(infoStream, directory, MergeState.CheckAbort.NONE, newSegment.info, context); + newSegment.info.setUseCompoundFile(true); synchronized(this) { deleter.deleteNewFiles(oldFiles); @@ -2043,7 +2044,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // creating CFS so that 1) .si isn't slurped into CFS, // and 2) .si reflects useCompoundFile=true change // above: - codec.segmentInfosFormat().getSegmentInfosWriter().write(directory, newSegment, flushedSegment.fieldInfos, context); + codec.segmentInfosFormat().getSegmentInfosWriter().write(directory, newSegment.info, flushedSegment.fieldInfos, context); // nocommit ideally we would freeze newSegment here!! // because any changes after writing the .si will be @@ -2064,8 +2065,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // carry the changes; there's no reason to use // filesystem as intermediary here. 
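// Sketch of the ordering these hunks implement (summary, not patch lines;
// "perCommit" is a stand-in name): the codec writes the .si before any
// deletes exist, because flush-time deletes go into a generation'd file
// owned by the per-commit wrapper, never into the write-once SegmentInfo:
//   codec.liveDocsFormat().writeLiveDocs(liveDocs, dir, perCommit, delCount, context);
//   perCommit.setDelCount(delCount);   // per-commit state only
//   perCommit.advanceDelGen();         // the generation just written becomes current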
- SegmentInfo info = flushedSegment.segmentInfo; - Codec codec = info.getCodec(); + SegmentInfoPerCommit info = flushedSegment.segmentInfo; + Codec codec = info.info.getCodec(); codec.liveDocsFormat().writeLiveDocs(flushedSegment.liveDocs, directory, info, delCount, context); newSegment.setDelCount(delCount); newSegment.advanceDelGen(); @@ -2076,11 +2077,11 @@ if (!success) { if (infoStream.isEnabled("IW")) { infoStream.message("IW", "hit exception " + - "reating compound file for newly flushed segment " + newSegment.name); + "creating compound file for newly flushed segment " + newSegment.info.name); } synchronized(this) { - deleter.refresh(newSegment.name); + deleter.refresh(newSegment.info.name); } } } @@ -2102,7 +2103,7 @@ * * @see #prepareFlushedSegment(FlushedSegment) */ - synchronized void publishFlushedSegment(SegmentInfo newSegment, + synchronized void publishFlushedSegment(SegmentInfoPerCommit newSegment, FrozenBufferedDeletes packet, FrozenBufferedDeletes globalPacket) throws IOException { // Lock order IW -> BDS synchronized (bufferedDeletesStream) { @@ -2132,7 +2133,7 @@ } } - synchronized boolean useCompoundFile(SegmentInfo segmentInfo) throws IOException { + synchronized boolean useCompoundFile(SegmentInfoPerCommit segmentInfo) throws IOException { return mergePolicy.useCompoundFile(segmentInfos, segmentInfo); } @@ -2208,7 +2209,7 @@ flush(false, true); - List<SegmentInfo> infos = new ArrayList<SegmentInfo>(); + List<SegmentInfoPerCommit> infos = new ArrayList<SegmentInfoPerCommit>(); for (Directory dir : dirs) { if (infoStream.isEnabled("IW")) { infoStream.message("IW", "addIndexes: process directory " + dir); } @@ -2218,17 +2219,17 @@ final Set<String> dsFilesCopied = new HashSet<String>(); final Map<String,String> dsNames = new HashMap<String,String>(); final Set<String> copiedFiles = new HashSet<String>(); - for (SegmentInfo info : sis) { - assert !infos.contains(info): "dup info dir=" + info.dir + " name=" + info.name; + for (SegmentInfoPerCommit info : sis) { + assert !infos.contains(info): "dup info dir=" + info.info.dir + " name=" + info.info.name; String newSegName = newSegmentName(); - String dsName = info.getDocStoreSegment(); + String dsName = info.info.getDocStoreSegment(); if (infoStream.isEnabled("IW")) { - infoStream.message("IW", "addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName + " info=" + info); + infoStream.message("IW", "addIndexes: process segment origName=" + info.info.name + " newName=" + newSegName + " dsName=" + dsName + " info=" + info); } - IOContext context = new IOContext(new MergeInfo(info.docCount, info.sizeInBytes(), true, -1)); + IOContext context = new IOContext(new MergeInfo(info.info.docCount, info.info.sizeInBytes(), true, -1)); infos.add(copySegmentAsIs(info, newSegName, dsNames, dsFilesCopied, context, copiedFiles)); } @@ -2297,8 +2298,10 @@ MergeState mergeState = merger.merge(); // merge 'em SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, mergeState.mergedDocCount, - -1, mergedName, false, null, false, 0, + -1, mergedName, false, null, false, codec, null); + SegmentInfoPerCommit infoPerCommit = new SegmentInfoPerCommit(info, 0, -1L); + info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles())); 
trackingDir.getCreatedFiles().clear(); @@ -2307,16 +2310,16 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { boolean useCompoundFile; synchronized(this) { // Guard segmentInfos if (stopMerges) { - deleter.deleteNewFiles(info.files()); + deleter.deleteNewFiles(infoPerCommit.files()); return; } ensureOpen(); - useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, info); + useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, infoPerCommit); } // Now create the compound file if needed if (useCompoundFile) { - Collection filesToDelete = info.files(); + Collection filesToDelete = infoPerCommit.files(); createCompoundFile(infoStream, directory, MergeState.CheckAbort.NONE, info, context); // delete new non cfs files directly: they were never @@ -2341,7 +2344,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { return; } ensureOpen(); - segmentInfos.add(info); + segmentInfos.add(infoPerCommit); checkpoint(); } } catch (OutOfMemoryError oom) { @@ -2350,15 +2353,15 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } /** Copies the segment files as-is into the IndexWriter's directory. */ - private SegmentInfo copySegmentAsIs(SegmentInfo info, String segName, - Map dsNames, Set dsFilesCopied, IOContext context, - Set copiedFiles) + private SegmentInfoPerCommit copySegmentAsIs(SegmentInfoPerCommit info, String segName, + Map dsNames, Set dsFilesCopied, IOContext context, + Set copiedFiles) throws IOException { // Determine if the doc store of this segment needs to be copied. It's // only relevant for segments that share doc store with others, // because the DS might have been copied already, in which case we // just want to update the DS name of this SegmentInfo. - String dsName = info.getDocStoreSegment(); + String dsName = info.info.getDocStoreSegment(); assert dsName != null; final String newDsName; if (dsNames.containsKey(dsName)) { @@ -2369,34 +2372,34 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } Set codecDocStoreFiles = new HashSet(); - final boolean hasSharedDocStore = info.getDocStoreOffset() != -1; - final String segmentInfoFileName3X = IndexFileNames.segmentFileName(info.name, + final boolean hasSharedDocStore = info.info.getDocStoreOffset() != -1; + final String segmentInfoFileName3X = IndexFileNames.segmentFileName(info.info.name, "", Lucene3xSegmentInfoFormat.SI_EXTENSION); if (hasSharedDocStore) { // only violate the codec this way if it's preflex & // shares doc stores - assert info.getDocStoreSegment() != null; + assert info.info.getDocStoreSegment() != null; // nocommit what to do.... 
- if (info.getDocStoreIsCompoundFile()) { - codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.getDocStoreSegment(), "", "cfx")); + if (info.info.getDocStoreIsCompoundFile()) { + codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.info.getDocStoreSegment(), "", "cfx")); } else { - codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.getDocStoreSegment(), "", "fdt")); - codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.getDocStoreSegment(), "", "fdx")); - codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.getDocStoreSegment(), "", "tvx")); - codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.getDocStoreSegment(), "", "tvf")); - codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.getDocStoreSegment(), "", "tvd")); + codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.info.getDocStoreSegment(), "", "fdt")); + codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.info.getDocStoreSegment(), "", "fdx")); + codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.info.getDocStoreSegment(), "", "tvx")); + codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.info.getDocStoreSegment(), "", "tvf")); + codecDocStoreFiles.add(IndexFileNames.segmentFileName(info.info.getDocStoreSegment(), "", "tvd")); } } - //System.out.println("copy seg=" + info.name + " version=" + info.getVersion()); + //System.out.println("copy seg=" + info.info.name + " version=" + info.info.getVersion()); // Same SI as before but we change directory, name and docStoreSegment: - SegmentInfo newInfo = new SegmentInfo(directory, info.getVersion(), segName, info.docCount, info.getDocStoreOffset(), - newDsName, info.getDocStoreIsCompoundFile(), info.getNormGen(), info.getUseCompoundFile(), - info.getDelCount(), info.getCodec(), info.getDiagnostics()); - newInfo.setDelGen(info.getDelGen()); + SegmentInfo newInfo = new SegmentInfo(directory, info.info.getVersion(), segName, info.info.docCount, info.info.getDocStoreOffset(), + newDsName, info.info.getDocStoreIsCompoundFile(), info.info.getNormGen(), info.info.getUseCompoundFile(), + info.info.getCodec(), info.info.getDiagnostics()); + SegmentInfoPerCommit newInfoPerCommit = new SegmentInfoPerCommit(newInfo, info.getDelCount(), info.getDelGen()); Set segFiles = new HashSet(); @@ -2446,10 +2449,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists"; assert !copiedFiles.contains(file): "file \"" + file + "\" is being copied more than once"; copiedFiles.add(file); - info.dir.copy(directory, file, newFileName, context); + info.info.dir.copy(directory, file, newFileName, context); } - return newInfo; + return newInfoPerCommit; } /** @@ -2695,7 +2698,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { lastCommitChangeCount = pendingCommitChangeCount; segmentInfos.updateGeneration(pendingCommit); segmentInfos.setUserData(pendingCommit.getUserData()); - rollbackSegments = pendingCommit.createBackupSegmentInfos(true); + rollbackSegments = pendingCommit.createBackupSegmentInfos(); deleter.checkpoint(pendingCommit, true); } finally { // Matches the incRef done in prepareCommit: @@ -2813,7 +2816,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { if (infoStream.isEnabled("IW")) { infoStream.message("IW", "drop 100% deleted segments: " + segString(result.allDeleted)); } - for (SegmentInfo info : result.allDeleted) { + for (SegmentInfoPerCommit info : result.allDeleted) { // If a merge has 
already registered for this // segment, we leave it in the readerPool; the // merge will skip merging it and will then drop @@ -2851,9 +2854,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } private synchronized void ensureValidMerge(MergePolicy.OneMerge merge) throws IOException { - for(SegmentInfo info : merge.segments) { + for(SegmentInfoPerCommit info : merge.segments) { if (!segmentInfos.contains(info)) { - throw new MergePolicy.MergeException("MergePolicy selected a segment (" + info.name + ") that is not in the current index " + segString(), directory); + throw new MergePolicy.MergeException("MergePolicy selected a segment (" + info.info.name + ") that is not in the current index " + segString(), directory); } } } @@ -2871,7 +2874,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { assert testPoint("startCommitMergeDeletes"); - final List sourceSegments = merge.segments; + final List sourceSegments = merge.segments; if (infoStream.isEnabled("IW")) { infoStream.message("IW", "commitMergeDeletes " + segString(merge.segments)); @@ -2886,14 +2889,14 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { ReadersAndLiveDocs mergedDeletes = null; for(int i=0; i < sourceSegments.size(); i++) { - SegmentInfo info = sourceSegments.get(i); + SegmentInfoPerCommit info = sourceSegments.get(i); minGen = Math.min(info.getBufferedDeletesGen(), minGen); - final int docCount = info.docCount; + final int docCount = info.info.docCount; final Bits prevLiveDocs = merge.readerLiveDocs.get(i); final Bits currentLiveDocs; final ReadersAndLiveDocs rld = readerPool.get(info, false); // We hold a ref so it should still be in the pool: - assert rld != null: "seg=" + info.name; + assert rld != null: "seg=" + info.info.name; currentLiveDocs = rld.getLiveDocs(); if (prevLiveDocs != null) { @@ -2936,7 +2939,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } } } else { - docUpto += info.docCount - info.getDelCount() - rld.getPendingDeleteCount(); + docUpto += info.info.docCount - info.getDelCount() - rld.getPendingDeleteCount(); } } else if (currentLiveDocs != null) { assert currentLiveDocs.length() == docCount; @@ -2954,11 +2957,11 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } } else { // No deletes before or after - docUpto += info.docCount; + docUpto += info.info.docCount; } } - assert docUpto == merge.info.docCount; + assert docUpto == merge.info.info.docCount; if (infoStream.isEnabled("IW")) { if (mergedDeletes == null) { @@ -3007,7 +3010,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { return false; } - final ReadersAndLiveDocs mergedDeletes = merge.info.docCount == 0 ? null : commitMergedDeletes(merge); + final ReadersAndLiveDocs mergedDeletes = merge.info.info.docCount == 0 ? 
null : commitMergedDeletes(merge); assert mergedDeletes == null || mergedDeletes.getPendingDeleteCount() != 0; @@ -3019,9 +3022,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { assert !segmentInfos.contains(merge.info); final boolean allDeleted = merge.segments.size() == 0 || - merge.info.docCount == 0 || + merge.info.info.docCount == 0 || (mergedDeletes != null && - mergedDeletes.getPendingDeleteCount() == merge.info.docCount); + mergedDeletes.getPendingDeleteCount() == merge.info.info.docCount); if (infoStream.isEnabled("IW")) { if (allDeleted) { @@ -3035,7 +3038,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // the new segment: assert merge.segments.size() > 0 || dropSegment; - assert merge.info.docCount != 0 || keepFullyDeletedSegments || dropSegment; + assert merge.info.info.docCount != 0 || keepFullyDeletedSegments || dropSegment; segmentInfos.applyMergeChanges(merge, dropSegment); @@ -3141,7 +3144,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { infoStream.message("IW", "hit exception during merge"); } if (merge.info != null && !segmentInfos.contains(merge.info)) { - deleter.refresh(merge.info.name); + deleter.refresh(merge.info.info.name); } } @@ -3158,7 +3161,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } if (merge.info != null) { if (infoStream.isEnabled("IW")) { - infoStream.message("IW", "merge time " + (System.currentTimeMillis()-t0) + " msec for " + merge.info.docCount + " docs"); + infoStream.message("IW", "merge time " + (System.currentTimeMillis()-t0) + " msec for " + merge.info.info.docCount + " docs"); } } } @@ -3186,14 +3189,14 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } boolean isExternal = false; - for(SegmentInfo info : merge.segments) { + for(SegmentInfoPerCommit info : merge.segments) { if (mergingSegments.contains(info)) { return false; } if (!segmentInfos.contains(info)) { return false; } - if (info.dir != directory) { + if (info.info.dir != directory) { isExternal = true; } if (segmentsToMerge.containsKey(info)) { @@ -3218,8 +3221,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // threads, start if (infoStream.isEnabled("IW")) { StringBuilder builder = new StringBuilder("registerMerge merging= ["); - for (SegmentInfo info : mergingSegments) { - builder.append(info.name).append(", "); + for (SegmentInfoPerCommit info : mergingSegments) { + builder.append(info.info.name).append(", "); } builder.append("]"); // don't call mergingSegments.toString() could lead to ConcurrentModException @@ -3228,7 +3231,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { infoStream.message("IW", builder.toString()); } } - for(SegmentInfo info : merge.segments) { + for(SegmentInfoPerCommit info : merge.segments) { if (infoStream.isEnabled("IW")) { infoStream.message("IW", "registerMerge info=" + info); } @@ -3294,7 +3297,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { if (infoStream.isEnabled("IW")) { infoStream.message("IW", "drop 100% deleted segments: " + result.allDeleted); } - for(SegmentInfo info : result.allDeleted) { + for(SegmentInfoPerCommit info : result.allDeleted) { segmentInfos.remove(info); if (merge.segments.contains(info)) { mergingSegments.remove(info); @@ -3313,22 +3316,23 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // ConcurrentMergePolicy we keep deterministic segment // names. 
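// The added lines below are the idiom this patch repeats wherever a segment
// is born (flush in DocumentsWriterPerThread, addIndexes above, merge here):
// build the write-once SegmentInfo first, then wrap it with empty per-commit
// state (zero deletes, delGen -1) before anything else references it.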
final String mergeSegmentName = newSegmentName(); - merge.info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, 0, -1, mergeSegmentName, false, null, false, 0, codec, details); + SegmentInfo si = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, 0, -1, mergeSegmentName, false, null, false, codec, details); + merge.info = new SegmentInfoPerCommit(si, 0, -1L); // Lock order: IW -> BD bufferedDeletesStream.prune(segmentInfos); if (infoStream.isEnabled("IW")) { - infoStream.message("IW", "merge seg=" + merge.info.name); + infoStream.message("IW", "merge seg=" + merge.info.info.name); } assert merge.estimatedMergeBytes == 0; - for(SegmentInfo info : merge.segments) { - if (info.docCount > 0) { + for(SegmentInfoPerCommit info : merge.segments) { + if (info.info.docCount > 0) { final int delCount = numDeletedDocs(info); - assert delCount <= info.docCount; - final double delRatio = ((double) delCount)/info.docCount; - merge.estimatedMergeBytes += info.sizeInBytes() * (1.0 - delRatio); + assert delCount <= info.info.docCount; + final double delRatio = ((double) delCount)/info.info.docCount; + merge.estimatedMergeBytes += info.info.sizeInBytes() * (1.0 - delRatio); } } } @@ -3363,8 +3367,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // It's possible we are called twice, eg if there was an // exception inside mergeInit if (merge.registerDone) { - final List sourceSegments = merge.segments; - for(SegmentInfo info : sourceSegments) { + final List sourceSegments = merge.segments; + for(SegmentInfoPerCommit info : sourceSegments) { mergingSegments.remove(info); } merge.registerDone = false; @@ -3420,9 +3424,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { merge.checkAborted(directory); - final String mergedName = merge.info.name; + final String mergedName = merge.info.info.name; - List sourceSegments = merge.segments; + List sourceSegments = merge.segments; IOContext context = new IOContext(merge.getMergeInfo()); @@ -3445,7 +3449,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { int segUpto = 0; while(segUpto < sourceSegments.size()) { - final SegmentInfo info = sourceSegments.get(segUpto); + final SegmentInfoPerCommit info = sourceSegments.get(segUpto); // Hold onto the "live" reader; we will use this to // commit merged deletes @@ -3478,8 +3482,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } merge.readerLiveDocs.add(liveDocs); merge.readers.add(reader); - assert delCount <= info.docCount: "delCount=" + delCount + " info.docCount=" + info.docCount + " rld.pendingDeleteCount=" + rld.getPendingDeleteCount() + " info.getDelCount()=" + info.getDelCount(); - if (delCount < info.docCount) { + assert delCount <= info.info.docCount: "delCount=" + delCount + " info.docCount=" + info.info.docCount + " rld.pendingDeleteCount=" + rld.getPendingDeleteCount() + " info.getDelCount()=" + info.getDelCount(); + if (delCount < info.info.docCount) { merger.add(reader, liveDocs); } segUpto++; @@ -3489,13 +3493,13 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // This is where all the work happens: MergeState mergeState = merger.merge(); - merge.info.docCount = mergeState.mergedDocCount; - merge.info.setFiles(new HashSet(dirWrapper.getCreatedFiles())); + merge.info.info.docCount = mergeState.mergedDocCount; + merge.info.info.setFiles(new HashSet(dirWrapper.getCreatedFiles())); // Record which codec was used to write the segment if (infoStream.isEnabled("IW")) { - 
infoStream.message("IW", "merge codec=" + codec + " docCount=" + merge.info.docCount + "; merged segment has " + + infoStream.message("IW", "merge codec=" + codec + " docCount=" + merge.info.info.docCount + "; merged segment has " + (mergeState.fieldInfos.hasVectors() ? "vectors" : "no vectors") + "; " + (mergeState.fieldInfos.hasNorms() ? "norms" : "no norms") + "; " + (mergeState.fieldInfos.hasDocValues() ? "docValues" : "no docValues") + "; " + @@ -3518,7 +3522,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { Collection filesToRemove = merge.info.files(); try { - filesToRemove = createCompoundFile(infoStream, directory, checkAbort, merge.info, context); + filesToRemove = createCompoundFile(infoStream, directory, checkAbort, merge.info.info, context); success = true; } catch (IOException ioe) { synchronized(this) { @@ -3565,7 +3569,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } } - merge.info.setUseCompoundFile(true); + merge.info.info.setUseCompoundFile(true); } // Have codec write SegmentInfo. Must do this after @@ -3574,7 +3578,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // above: boolean success2 = false; try { - codec.segmentInfosFormat().getSegmentInfosWriter().write(directory, merge.info, mergeState.fieldInfos, context); + codec.segmentInfosFormat().getSegmentInfosWriter().write(directory, merge.info.info, mergeState.fieldInfos, context); success2 = true; } finally { if (!success2) { @@ -3589,7 +3593,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // lost... if (infoStream.isEnabled("IW")) { - infoStream.message("IW", String.format("merged segment size=%.3f MB vs estimate=%.3f MB", merge.info.sizeInBytes()/1024./1024., merge.estimatedMergeBytes/1024/1024.)); + infoStream.message("IW", String.format("merged segment size=%.3f MB vs estimate=%.3f MB", merge.info.info.sizeInBytes()/1024./1024., merge.estimatedMergeBytes/1024/1024.)); } final IndexReaderWarmer mergedSegmentWarmer = config.getMergedSegmentWarmer(); @@ -3624,13 +3628,14 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } } - return merge.info.docCount; + return merge.info.info.docCount; } synchronized void addMergeException(MergePolicy.OneMerge merge) { assert merge.getException() != null; - if (!mergeExceptions.contains(merge) && mergeGen == merge.mergeGen) + if (!mergeExceptions.contains(merge) && mergeGen == merge.mergeGen) { mergeExceptions.add(merge); + } } // For test purposes. @@ -3644,7 +3649,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } // utility routines for tests - synchronized SegmentInfo newestSegment() { + synchronized SegmentInfoPerCommit newestSegment() { return segmentInfos.size() > 0 ? 
segmentInfos.info(segmentInfos.size()-1) : null; } @@ -3654,9 +3659,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } /** @lucene.internal */ - public synchronized String segString(Iterable infos) throws IOException { + public synchronized String segString(Iterable infos) throws IOException { final StringBuilder buffer = new StringBuilder(); - for(final SegmentInfo info : infos) { + for(final SegmentInfoPerCommit info : infos) { if (buffer.length() > 0) { buffer.append(' '); } @@ -3666,8 +3671,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { } /** @lucene.internal */ - public synchronized String segString(SegmentInfo info) throws IOException { - return info.toString(info.dir, numDeletedDocs(info) - info.getDelCount()); + public synchronized String segString(SegmentInfoPerCommit info) throws IOException { + return info.toString(info.info.dir, numDeletedDocs(info) - info.getDelCount()); } private synchronized void doWait() { @@ -3716,12 +3721,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // For infoStream output synchronized SegmentInfos toLiveInfos(SegmentInfos sis) { final SegmentInfos newSIS = new SegmentInfos(); - final Map liveSIS = new HashMap(); - for(SegmentInfo info : segmentInfos) { + final Map liveSIS = new HashMap(); + for(SegmentInfoPerCommit info : segmentInfos) { liveSIS.put(info, info); } - for(SegmentInfo info : sis) { - SegmentInfo liveInfo = liveSIS.get(info); + for(SegmentInfoPerCommit info : sis) { + SegmentInfoPerCommit liveInfo = liveSIS.get(info); if (liveInfo != null) { info = liveInfo; } diff --git a/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java index dc6a7a2cb62..cc46cfd48c6 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java @@ -41,7 +41,7 @@ public class LogByteSizeMergePolicy extends LogMergePolicy { } @Override - protected long size(SegmentInfo info) throws IOException { + protected long size(SegmentInfoPerCommit info) throws IOException { return sizeBytes(info); } diff --git a/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java index ff138fff015..bb0a1983f0f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java @@ -38,7 +38,7 @@ public class LogDocMergePolicy extends LogMergePolicy { } @Override - protected long size(SegmentInfo info) throws IOException { + protected long size(SegmentInfoPerCommit info) throws IOException { return sizeDocs(info); } diff --git a/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java index 1a73cb0a4ba..87e261e5280 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java @@ -134,7 +134,7 @@ public abstract class LogMergePolicy extends MergePolicy { // Javadoc inherited @Override - public boolean useCompoundFile(SegmentInfos infos, SegmentInfo mergedInfo) throws IOException { + public boolean useCompoundFile(SegmentInfos infos, SegmentInfoPerCommit mergedInfo) throws IOException { final boolean doCFS; if (!useCompoundFile) { @@ -143,8 +143,9 @@ public abstract class LogMergePolicy extends MergePolicy { 
doCFS = true; } else { long totalSize = 0; - for (SegmentInfo info : infos) + for (SegmentInfoPerCommit info : infos) { totalSize += size(info); + } doCFS = size(mergedInfo) <= noCFSRatio * totalSize; } @@ -179,37 +180,37 @@ public abstract class LogMergePolicy extends MergePolicy { @Override public void close() {} - abstract protected long size(SegmentInfo info) throws IOException; + abstract protected long size(SegmentInfoPerCommit info) throws IOException; - protected long sizeDocs(SegmentInfo info) throws IOException { + protected long sizeDocs(SegmentInfoPerCommit info) throws IOException { if (calibrateSizeByDeletes) { int delCount = writer.get().numDeletedDocs(info); - assert delCount <= info.docCount; - return (info.docCount - (long)delCount); + assert delCount <= info.info.docCount; + return (info.info.docCount - (long)delCount); } else { - return info.docCount; + return info.info.docCount; } } - protected long sizeBytes(SegmentInfo info) throws IOException { + protected long sizeBytes(SegmentInfoPerCommit info) throws IOException { long byteSize = info.sizeInBytes(); if (calibrateSizeByDeletes) { int delCount = writer.get().numDeletedDocs(info); - double delRatio = (info.docCount <= 0 ? 0.0f : ((float)delCount / (float)info.docCount)); + double delRatio = (info.info.docCount <= 0 ? 0.0f : ((float)delCount / (float)info.info.docCount)); assert delRatio <= 1.0; - return (info.docCount <= 0 ? byteSize : (long)(byteSize * (1.0 - delRatio))); + return (info.info.docCount <= 0 ? byteSize : (long)(byteSize * (1.0 - delRatio))); } else { return byteSize; } } - protected boolean isMerged(SegmentInfos infos, int maxNumSegments, Map segmentsToMerge) throws IOException { + protected boolean isMerged(SegmentInfos infos, int maxNumSegments, Map segmentsToMerge) throws IOException { final int numSegments = infos.size(); int numToMerge = 0; - SegmentInfo mergeInfo = null; + SegmentInfoPerCommit mergeInfo = null; boolean segmentIsOriginal = false; for(int i=0;i 0; return !hasDeletions && - !info.hasSeparateNorms() && - info.dir == w.getDirectory() && - (info.getUseCompoundFile() == useCompoundFile || noCFSRatio < 1.0); + !info.info.hasSeparateNorms() && + info.info.dir == w.getDirectory() && + (info.info.getUseCompoundFile() == useCompoundFile || noCFSRatio < 1.0); } /** @@ -247,11 +248,11 @@ public abstract class LogMergePolicy extends MergePolicy { private MergeSpecification findForcedMergesSizeLimit( SegmentInfos infos, int maxNumSegments, int last) throws IOException { MergeSpecification spec = new MergeSpecification(); - final List segments = infos.asList(); + final List segments = infos.asList(); int start = last - 1; while (start >= 0) { - SegmentInfo info = infos.info(start); + SegmentInfoPerCommit info = infos.info(start); if (size(info) > maxMergeSizeForForcedMerge || sizeDocs(info) > maxMergeDocs) { if (verbose()) { message("findForcedMergesSizeLimit: skip segment=" + info + ": size is > maxMergeSize (" + maxMergeSizeForForcedMerge + ") or sizeDocs is > maxMergeDocs (" + maxMergeDocs + ")"); @@ -288,7 +289,7 @@ public abstract class LogMergePolicy extends MergePolicy { */ private MergeSpecification findForcedMergesMaxNumSegments(SegmentInfos infos, int maxNumSegments, int last) throws IOException { MergeSpecification spec = new MergeSpecification(); - final List segments = infos.asList(); + final List segments = infos.asList(); // First, enroll all "full" merges (size // mergeFactor) to potentially be run concurrently: @@ -326,8 +327,9 @@ public abstract class LogMergePolicy extends 
MergePolicy { for(int i=0;i segmentsToMerge) throws IOException { + int maxNumSegments, Map segmentsToMerge) throws IOException { assert maxNumSegments > 0; if (verbose()) { @@ -373,7 +375,7 @@ public abstract class LogMergePolicy extends MergePolicy { // since merging started): int last = infos.size(); while (last > 0) { - final SegmentInfo info = infos.info(--last); + final SegmentInfoPerCommit info = infos.info(--last); if (segmentsToMerge.get(info) != null) { last++; break; @@ -398,7 +400,7 @@ public abstract class LogMergePolicy extends MergePolicy { // Check if there are any segments above the threshold boolean anyTooLarge = false; for (int i = 0; i < last; i++) { - SegmentInfo info = infos.info(i); + SegmentInfoPerCommit info = infos.info(i); if (size(info) > maxMergeSizeForForcedMerge || sizeDocs(info) > maxMergeDocs) { anyTooLarge = true; break; @@ -420,7 +422,7 @@ public abstract class LogMergePolicy extends MergePolicy { @Override public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos) throws CorruptIndexException, IOException { - final List segments = segmentInfos.asList(); + final List segments = segmentInfos.asList(); final int numSegments = segments.size(); if (verbose()) { @@ -432,11 +434,11 @@ public abstract class LogMergePolicy extends MergePolicy { IndexWriter w = writer.get(); assert w != null; for(int i=0;i 0) { if (verbose()) { - message(" segment " + info.name + " has deletions"); + message(" segment " + info.info.name + " has deletions"); } if (firstSegmentWithDeletions == -1) firstSegmentWithDeletions = i; @@ -472,11 +474,11 @@ public abstract class LogMergePolicy extends MergePolicy { } private static class SegmentInfoAndLevel implements Comparable { - SegmentInfo info; + SegmentInfoPerCommit info; float level; int index; - public SegmentInfoAndLevel(SegmentInfo info, float level, int index) { + public SegmentInfoAndLevel(SegmentInfoPerCommit info, float level, int index) { this.info = info; this.level = level; this.index = index; @@ -484,12 +486,13 @@ public abstract class LogMergePolicy extends MergePolicy { // Sorts largest to smallest public int compareTo(SegmentInfoAndLevel other) { - if (level < other.level) + if (level < other.level) { return 1; - else if (level > other.level) + } else if (level > other.level) { return -1; - else + } else { return 0; + } } } @@ -513,10 +516,10 @@ public abstract class LogMergePolicy extends MergePolicy { final List levels = new ArrayList(); final float norm = (float) Math.log(mergeFactor); - final Collection mergingSegments = writer.get().getMergingSegments(); + final Collection mergingSegments = writer.get().getMergingSegments(); for(int i=0;i maxLevel) + if (level > maxLevel) { maxLevel = level; + } } // Now search backwards for the rightmost segment that // falls into this level: float levelBottom; - if (maxLevel <= levelFloor) + if (maxLevel <= levelFloor) { // All remaining segments fall into the min level levelBottom = -1.0F; - else { + } else { levelBottom = (float) (maxLevel - LEVEL_LOG_SPAN); // Force a boundary at the level floor - if (levelBottom < levelFloor && maxLevel >= levelFloor) + if (levelBottom < levelFloor && maxLevel >= levelFloor) { levelBottom = levelFloor; + } } int upto = numMergeableSegments-1; @@ -597,7 +602,7 @@ public abstract class LogMergePolicy extends MergePolicy { boolean anyTooLarge = false; boolean anyMerging = false; for(int i=start;i= maxMergeSize || sizeDocs(info) >= maxMergeDocs); if (mergingSegments.contains(info)) { anyMerging = true; @@ -610,7 +615,7 @@ 
public abstract class LogMergePolicy extends MergePolicy { } else if (!anyTooLarge) { if (spec == null) spec = new MergeSpecification(); - final List mergeInfos = new ArrayList(); + final List mergeInfos = new ArrayList(); for(int i=start;i readers; // used by IndexWriter List readerLiveDocs; // used by IndexWriter - public final List segments; + public final List segments; public final int totalDocCount; boolean aborted; Throwable error; boolean paused; - public OneMerge(List segments) { + public OneMerge(List segments) { if (0 == segments.size()) throw new RuntimeException("segments must include at least one segment"); // clone the list, as the in list may be based off original SegmentInfos and may be modified - this.segments = new ArrayList(segments); + this.segments = new ArrayList(segments); int count = 0; - for(SegmentInfo info : segments) { - count += info.docCount; + for(SegmentInfoPerCommit info : segments) { + count += info.info.docCount; } totalDocCount = count; } @@ -156,8 +156,9 @@ public abstract class MergePolicy implements java.io.Closeable { if (i > 0) b.append(' '); b.append(segments.get(i).toString(dir, 0)); } - if (info != null) - b.append(" into ").append(info.name); + if (info != null) { + b.append(" into ").append(info.info.name); + } if (maxNumSegments != -1) b.append(" [maxNumSegments=" + maxNumSegments + "]"); if (aborted) { @@ -172,8 +173,8 @@ public abstract class MergePolicy implements java.io.Closeable { * */ public long totalBytesSize() throws IOException { long total = 0; - for (SegmentInfo info : segments) { - total += info.sizeInBytes(); + for (SegmentInfoPerCommit info : segments) { + total += info.info.sizeInBytes(); } return total; } @@ -184,8 +185,8 @@ public abstract class MergePolicy implements java.io.Closeable { * */ public int totalNumDocs() throws IOException { int total = 0; - for (SegmentInfo info : segments) { - total += info.docCount; + for (SegmentInfoPerCommit info : segments) { + total += info.info.docCount; } return total; } @@ -309,7 +310,7 @@ public abstract class MergePolicy implements java.io.Closeable { * produced by a cascaded merge. */ public abstract MergeSpecification findForcedMerges( - SegmentInfos segmentInfos, int maxSegmentCount, Map segmentsToMerge) + SegmentInfos segmentInfos, int maxSegmentCount, Map segmentsToMerge) throws CorruptIndexException, IOException; /** @@ -330,5 +331,5 @@ public abstract class MergePolicy implements java.io.Closeable { /** * Returns true if a new segment (regardless of its origin) should use the compound file format. 
*/ - public abstract boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) throws IOException; + public abstract boolean useCompoundFile(SegmentInfos segments, SegmentInfoPerCommit newSegment) throws IOException; } diff --git a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java index 1d04f9ae4a3..212d70157f3 100644 --- a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java @@ -59,7 +59,7 @@ public final class NoMergePolicy extends MergePolicy { @Override public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, - int maxSegmentCount, Map segmentsToMerge) + int maxSegmentCount, Map segmentsToMerge) throws CorruptIndexException, IOException { return null; } @Override @@ -67,7 +67,7 @@ public final class NoMergePolicy extends MergePolicy { throws CorruptIndexException, IOException { return null; } @Override - public boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) { return useCompoundFile; } + public boolean useCompoundFile(SegmentInfos segments, SegmentInfoPerCommit newSegment) { return useCompoundFile; } @Override public void setIndexWriter(IndexWriter writer) {} diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndLiveDocs.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndLiveDocs.java index bb383200358..e9a17c9d22b 100644 --- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndLiveDocs.java +++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndLiveDocs.java @@ -32,7 +32,7 @@ import org.apache.lucene.util.MutableBits; class ReadersAndLiveDocs { // Not final because we replace (clone) when we need to // change it and it's been shared: - public final SegmentInfo info; + public final SegmentInfoPerCommit info; // Tracks how many consumers are using this instance: private final AtomicInteger refCount = new AtomicInteger(1); @@ -67,7 +67,7 @@ class ReadersAndLiveDocs { // external NRT reader: private boolean shared; - public ReadersAndLiveDocs(IndexWriter writer, SegmentInfo info) { + public ReadersAndLiveDocs(IndexWriter writer, SegmentInfoPerCommit info) { this.info = info; this.writer = writer; shared = true; @@ -98,16 +98,16 @@ class ReadersAndLiveDocs { int count; if (liveDocs != null) { count = 0; - for(int docID=0;docID diagnostics; @@ -106,10 +92,6 @@ public class SegmentInfo implements Cloneable { // nocommit final? 
private String version; - // NOTE: only used in-RAM by IW to track buffered deletes; - // this is never written to/read from the Directory - private long bufferedDeletesGen; - void setDiagnostics(Map diagnostics) { this.diagnostics = diagnostics; } @@ -125,19 +107,17 @@ public class SegmentInfo implements Cloneable { */ public SegmentInfo(Directory dir, String version, String name, int docCount, int docStoreOffset, String docStoreSegment, boolean docStoreIsCompoundFile, Map normGen, boolean isCompoundFile, - int delCount, Codec codec, Map diagnostics) { + Codec codec, Map diagnostics) { assert !(dir instanceof TrackingDirectoryWrapper); this.dir = dir; this.version = version; this.name = name; this.docCount = docCount; - this.delGen = NO; this.docStoreOffset = docStoreOffset; this.docStoreSegment = docStoreSegment; this.docStoreIsCompoundFile = docStoreIsCompoundFile; this.normGen = normGen; this.isCompoundFile = isCompoundFile; - this.delCount = delCount; this.codec = codec; this.diagnostics = diagnostics; } @@ -145,6 +125,7 @@ public class SegmentInfo implements Cloneable { /** * Returns total size in bytes of all of files used by this segment */ + // nocommit fails to take live docs into account... hmmm public long sizeInBytes() throws IOException { if (sizeInBytes == -1) { long sum = 0; @@ -156,37 +137,11 @@ public class SegmentInfo implements Cloneable { return sizeInBytes; } - public boolean hasDeletions() { - // Cases: - // - // delGen == NO: this means this segment does not have deletions yet - // delGen >= YES: this means this segment has deletions - // - return delGen != NO; - } - - void advanceDelGen() { - if (delGen == NO) { - delGen = YES; - } else { - delGen++; - } - sizeInBytes = -1; - } - - public long getNextDelGen() { - if (delGen == NO) { - return YES; - } else { - return delGen + 1; - } - } - - void clearDelGen() { - delGen = NO; + void clearSizeInBytes() { sizeInBytes = -1; } + // nocommit nuke? @Override public SegmentInfo clone() { final HashMap clonedNormGen; @@ -201,7 +156,7 @@ public class SegmentInfo implements Cloneable { SegmentInfo newInfo = new SegmentInfo(dir, version, name, docCount, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, clonedNormGen, isCompoundFile, - delCount, codec, new HashMap(diagnostics)); + codec, new HashMap(diagnostics)); final Set clonedFiles; if (setFiles != null) { clonedFiles = new HashSet(setFiles); @@ -209,8 +164,6 @@ public class SegmentInfo implements Cloneable { clonedFiles = null; } newInfo.setFiles(clonedFiles); - - newInfo.setDelGen(delGen); return newInfo; } @@ -250,20 +203,6 @@ public class SegmentInfo implements Cloneable { return isCompoundFile; } - public int getDelCount() { - return delCount; - } - - void setDelCount(int delCount) { - this.delCount = delCount; - assert delCount <= docCount; - } - - public void setDelGen(long delGen) { - this.delGen = delGen; - sizeInBytes = -1; - } - /** * @deprecated shared doc stores are not supported in >= 4.0 */ @@ -310,20 +249,15 @@ public class SegmentInfo implements Cloneable { * modify it. */ + // nocommit remove this temporarily to see who is calling + // it ... very dangerous having this one AND SIPC.files() public Collection files() throws IOException { // nocommit make sure when we are called we really have // files set ... if (setFiles == null) { throw new IllegalStateException("files were not computed yet"); } - - Set files = new HashSet(setFiles); - - // nocommit make this take list instead...? 
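[Editor's note] The removal here splits the file-listing responsibility: SegmentInfo.files() now returns only the segment's own files, and the live-docs files, whose names depend on the per-commit delGen, are layered on by SegmentInfoPerCommit.files() added later in this patch. A sketch of the resulting composition, with the generics restored that the diff text elides:

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import org.apache.lucene.index.SegmentInfoPerCommit;

class FileListings { // illustration only: mirrors SegmentInfoPerCommit.files() from this patch
  static Collection<String> allCommitFiles(SegmentInfoPerCommit sipc) throws IOException {
    Collection<String> files = new HashSet<String>(sipc.info.files()); // immutable core segment files
    // the codec adds the generation'd live-docs file(s), e.g. a _seg_N.del file:
    sipc.info.getCodec().liveDocsFormat().files(sipc, files);
    return files;
  }
}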
- // Must separately add any live docs files: - codec.liveDocsFormat().files(this, files); - - return new ArrayList(files); + return setFiles; } /** {@inheritDoc} */ @@ -344,7 +278,7 @@ public class SegmentInfo implements Cloneable { * shared doc stores named _1 (this part is * left off if doc stores are private).

*/ - public String toString(Directory dir, int pendingDelCount) { + public String toString(Directory dir, int delCount) { StringBuilder s = new StringBuilder(); s.append(name).append('(').append(version == null ? "?" : version).append(')').append(':'); @@ -356,7 +290,6 @@ public class SegmentInfo implements Cloneable { } s.append(docCount); - int delCount = getDelCount() + pendingDelCount; if (delCount != 0) { s.append('/').append(delCount); } @@ -412,19 +345,6 @@ public class SegmentInfo implements Cloneable { return version; } - long getBufferedDeletesGen() { - return bufferedDeletesGen; - } - - void setBufferedDeletesGen(long v) { - bufferedDeletesGen = v; - } - - /** @lucene.internal */ - public long getDelGen() { - return delGen; - } - /** @lucene.internal */ public Map getNormGen() { return normGen; diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java new file mode 100644 index 00000000000..82b4edb75ae --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfoPerCommit.java @@ -0,0 +1,130 @@ +package org.apache.lucene.index; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.IOException; +import java.util.Collection; +import java.util.HashSet; + +import org.apache.lucene.store.Directory; + +/** Embeds a [read-only] SegmentInfo and adds per-commit + * fields. + * + * @lucene.experimental */ + +// nocommit this class feels a lot like ReadersAndLiveDocs...? +// like it carries mutable per-segment state....? +public class SegmentInfoPerCommit { + + public final SegmentInfo info; + + // How many deleted docs in the segment: + private int delCount; + + // Generation number of the live docs file (-1 if there + // are no deletes yet): + private long delGen; + + public SegmentInfoPerCommit(SegmentInfo info, int delCount, long delGen) { + this.info = info; + this.delCount = delCount; + this.delGen = delGen; + } + + void advanceDelGen() { + if (delGen == -1) { + delGen = 1; + } else { + delGen++; + } + info.clearSizeInBytes(); + } + + public long sizeInBytes() throws IOException { + // nocommit add in live docs size + return info.sizeInBytes(); + } + + public Collection files() throws IOException { + Collection files = new HashSet(info.files()); + + // nocommit make this take list instead...?
+ // Must separately add any live docs files: + info.getCodec().liveDocsFormat().files(this, files); + + return files; + } + + // NOTE: only used in-RAM by IW to track buffered deletes; + // this is never written to/read from the Directory + private long bufferedDeletesGen; + + long getBufferedDeletesGen() { + return bufferedDeletesGen; + } + + void setBufferedDeletesGen(long v) { + bufferedDeletesGen = v; + } + + void clearDelGen() { + delGen = -1; + info.clearSizeInBytes(); + } + + public void setDelGen(long delGen) { + this.delGen = delGen; + info.clearSizeInBytes(); + } + + public boolean hasDeletions() { + return delGen != -1; + } + + public long getNextDelGen() { + if (delGen == -1) { + return 1; + } else { + return delGen + 1; + } + } + + public long getDelGen() { + return delGen; + } + + public int getDelCount() { + return delCount; + } + + void setDelCount(int delCount) { + this.delCount = delCount; + assert delCount <= info.docCount; + } + + public String toString(Directory dir, int pendingDelCount) { + return info.toString(dir, delCount + pendingDelCount); + } + + @Override + public SegmentInfoPerCommit clone() { + // nocommit ok? SI is immutable!? + return new SegmentInfoPerCommit(info, delCount, delGen); + } +} diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java index 8ff077c88d0..3c6d1d56ecf 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java @@ -113,7 +113,7 @@ import org.apache.lucene.util.ThreadInterruptedException; * * @lucene.experimental */ -public final class SegmentInfos implements Cloneable, Iterable { +public final class SegmentInfos implements Cloneable, Iterable { /** * The file format version for the segments_N codec header @@ -138,9 +138,7 @@ public final class SegmentInfos implements Cloneable, Iterable { public Map userData = Collections.emptyMap(); // Opaque Map that user can specify during IndexWriter.commit - private List segments = new ArrayList(); - private Set segmentSet = new HashSet(); - private transient List cachedUnmodifiableList; + private List segments = new ArrayList(); /** * If non-null, information about loading segments_N files @@ -148,7 +146,7 @@ public final class SegmentInfos implements Cloneable, Iterable { */ private static PrintStream infoStream = null; - public SegmentInfo info(int i) { + public SegmentInfoPerCommit info(int i) { return segments.get(i); } @@ -288,17 +286,17 @@ public final class SegmentInfos implements Cloneable, Iterable { //System.out.println("SIS.read seg=" + seg + " codec=" + codec); SegmentInfo info = codec.segmentInfosFormat().getSegmentInfosReader().read(directory, segName, IOContext.READ); info.setCodec(codec); - info.setDelGen(input.readLong()); - info.setDelCount(input.readInt()); - assert info.getDelCount() <= info.docCount; - add(info); + long delGen = input.readLong(); + int delCount = input.readInt(); + assert delCount <= info.docCount; + add(new SegmentInfoPerCommit(info, delCount, delGen)); } userData = input.readStringStringMap(); } else { Lucene3xSegmentInfoReader.readLegacyInfos(this, directory, input, format); Codec codec = Codec.forName("Lucene3x"); - for (SegmentInfo info : this) { - info.setCodec(codec); + for (SegmentInfoPerCommit info : this) { + info.info.setCodec(codec); } } @@ -364,13 +362,16 @@ public final class SegmentInfos implements Cloneable, Iterable { segnOutput.writeLong(version); 
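[Editor's note] Between the read path above and the write loop that follows, each segments_N entry now carries exactly four per-segment values: segment name, codec name, delGen, and delCount; everything else lives in the immutable per-segment .si file. A condensed sketch of that record round trip, assuming plain DataOutput/DataInput in place of the real header-, version- and checksum-carrying streams:

import java.io.IOException;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentInfoPerCommit;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

class SegmentRecord { // hypothetical helper, for illustration only
  static void write(DataOutput out, SegmentInfoPerCommit siPerCommit) throws IOException {
    out.writeString(siPerCommit.info.name);                 // the rest of SegmentInfo is in the .si file
    out.writeString(siPerCommit.info.getCodec().getName()); // names the format that can read that .si file
    out.writeLong(siPerCommit.getDelGen());                 // live-docs generation, -1 if no deletes
    out.writeInt(siPerCommit.getDelCount());
  }

  static SegmentInfoPerCommit read(DataInput in, Directory directory) throws IOException {
    String segName = in.readString();
    Codec codec = Codec.forName(in.readString());
    SegmentInfo info = codec.segmentInfosFormat().getSegmentInfosReader().read(directory, segName, IOContext.READ);
    info.setCodec(codec);
    long delGen = in.readLong();   // read order must mirror the write order above
    int delCount = in.readInt();
    return new SegmentInfoPerCommit(info, delCount, delGen);
  }
}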
segnOutput.writeInt(counter); // write counter segnOutput.writeInt(size()); // write infos - for (SegmentInfo si : this) { + for (SegmentInfoPerCommit siPerCommit : this) { + SegmentInfo si = siPerCommit.info; segnOutput.writeString(si.name); segnOutput.writeString(si.getCodec().getName()); - segnOutput.writeLong(si.getDelGen()); - segnOutput.writeInt(si.getDelCount()); + segnOutput.writeLong(siPerCommit.getDelGen()); + segnOutput.writeInt(siPerCommit.getDelCount()); assert si.dir == directory; + assert siPerCommit.getDelCount() <= si.docCount; + // If this segment is pre-4.x, perform a one-time + // "upgrade" to write the .si file for it: String version = si.getVersion(); @@ -423,12 +424,13 @@ // we are about to write this SI in 3.x format, dropping all codec information, etc. // so it had better be a 3.x segment or you will get very confusing errors later. assert si.getCodec() instanceof Lucene3xCodec : "broken test, trying to mix preflex with other codecs"; - assert si.getDelCount() <= si.docCount: "delCount=" + si.getDelCount() + " docCount=" + si.docCount + " segment=" + si.name; // Write the Lucene version that created this segment, since 3.1 output.writeString(si.getVersion()); output.writeString(si.name); output.writeInt(si.docCount); - output.writeLong(si.getDelGen()); + + // NOTE: a lie + output.writeLong(0L); output.writeInt(si.getDocStoreOffset()); if (si.getDocStoreOffset() != -1) { @@ -449,7 +451,10 @@ } output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); - output.writeInt(si.getDelCount()); + + // NOTE: a lie + output.writeInt(0); + // hasProx (lie): output.writeByte((byte) 1); output.writeStringStringMap(si.getDiagnostics()); @@ -484,11 +489,9 @@ try { final SegmentInfos sis = (SegmentInfos) super.clone(); // deep clone, first recreate all collections: - sis.segments = new ArrayList(size()); - sis.segmentSet = new HashSet(size()); - sis.cachedUnmodifiableList = null; - for(final SegmentInfo info : this) { - assert info.getCodec() != null; + sis.segments = new ArrayList(size()); + for(final SegmentInfoPerCommit info : this) { + assert info.info.getCodec() != null; // don't directly access segments, use add method!!!
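[Editor's note] The deep clone just below leans on SegmentInfoPerCommit.clone() copying only the mutable per-commit fields (delCount, delGen) while sharing the embedded SegmentInfo, which this patch treats as write-once. A small sketch of that contract, with a hypothetical helper name:

// Illustration only: the invariant SegmentInfos.clone() relies on.
static SegmentInfoPerCommit snapshot(SegmentInfoPerCommit original) {
  SegmentInfoPerCommit copy = original.clone();
  assert copy.info == original.info;                   // SegmentInfo instance is shared, not copied
  assert copy.getDelGen() == original.getDelGen();     // per-commit state is copied by value...
  assert copy.getDelCount() == original.getDelCount();
  return copy; // ...so advanceDelGen()/setDelGen() on the copy cannot disturb the original
}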
sis.add(info.clone()); } @@ -857,9 +860,10 @@ public final class SegmentInfos implements Cloneable, Iterable { } final int size = size(); for(int i=0;i { if (i > 0) { buffer.append(' '); } - final SegmentInfo info = info(i); + final SegmentInfoPerCommit info = info(i); buffer.append(info.toString(directory, 0)); } return buffer.toString(); @@ -986,8 +990,8 @@ public final class SegmentInfos implements Cloneable, Iterable { * this does not include deletions */ public int totalDocCount() { int count = 0; - for(SegmentInfo info : this) { - count += info.docCount; + for(SegmentInfoPerCommit info : this) { + count += info.info.docCount; } return count; } @@ -1000,12 +1004,12 @@ public final class SegmentInfos implements Cloneable, Iterable { /** applies all changes caused by committing a merge to this SegmentInfos */ void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) { - final Set mergedAway = new HashSet(merge.segments); + final Set mergedAway = new HashSet(merge.segments); boolean inserted = false; int newSegIdx = 0; for (int segIdx = 0, cnt = segments.size(); segIdx < cnt; segIdx++) { assert segIdx >= newSegIdx; - final SegmentInfo info = segments.get(segIdx); + final SegmentInfoPerCommit info = segments.get(segIdx); if (mergedAway.contains(info)) { if (!inserted && !dropSegment) { segments.set(segIdx, merge.info); @@ -1029,93 +1033,68 @@ public final class SegmentInfos implements Cloneable, Iterable { if (!inserted && !dropSegment) { segments.add(0, merge.info); } - - // update the Set - if (!dropSegment) { - segmentSet.add(merge.info); - } - segmentSet.removeAll(mergedAway); - - assert segmentSet.size() == segments.size(); } - List createBackupSegmentInfos(boolean cloneChildren) { - if (cloneChildren) { - final List list = new ArrayList(size()); - for(final SegmentInfo info : this) { - assert info.getCodec() != null; - list.add(info.clone()); - } - return list; - } else { - return new ArrayList(segments); + List createBackupSegmentInfos() { + final List list = new ArrayList(size()); + for(final SegmentInfoPerCommit info : this) { + assert info.info.getCodec() != null; + list.add(info.clone()); } + return list; } - void rollbackSegmentInfos(List infos) { + void rollbackSegmentInfos(List infos) { this.clear(); this.addAll(infos); } /** Returns an unmodifiable {@link Iterator} of contained segments in order. */ // @Override (comment out until Java 6) - public Iterator iterator() { + public Iterator iterator() { return asList().iterator(); } /** Returns all contained segments as an unmodifiable {@link List} view. 
*/ - public List asList() { - if (cachedUnmodifiableList == null) { - cachedUnmodifiableList = Collections.unmodifiableList(segments); - } - return cachedUnmodifiableList; + public List asList() { + return Collections.unmodifiableList(segments); } public int size() { return segments.size(); } - public void add(SegmentInfo si) { - if (segmentSet.contains(si)) { - throw new IllegalStateException("Cannot add the same segment two times to this SegmentInfos instance"); - } + public void add(SegmentInfoPerCommit si) { segments.add(si); - segmentSet.add(si); - assert segmentSet.size() == segments.size(); } - public void addAll(Iterable sis) { - for (final SegmentInfo si : sis) { + public void addAll(Iterable sis) { + for (final SegmentInfoPerCommit si : sis) { this.add(si); } } public void clear() { segments.clear(); - segmentSet.clear(); - } - - public void remove(SegmentInfo si) { - final int index = this.indexOf(si); - if (index >= 0) { - this.remove(index); - } - } - - public void remove(int index) { - segmentSet.remove(segments.remove(index)); - assert segmentSet.size() == segments.size(); - } - - public boolean contains(SegmentInfo si) { - return segmentSet.contains(si); } - public int indexOf(SegmentInfo si) { - if (segmentSet.contains(si)) { - return segments.indexOf(si); - } else { - return -1; - } + /** WARNING: O(N) cost */ + public void remove(SegmentInfoPerCommit si) { + segments.remove(si); + } + + /** WARNING: O(N) cost */ + void remove(int index) { + segments.remove(index); + } + + /** WARNING: O(N) cost */ + boolean contains(SegmentInfoPerCommit si) { + return segments.contains(si); + } + + /** WARNING: O(N) cost */ + int indexOf(SegmentInfoPerCommit si) { + return segments.indexOf(si); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java index 03e3c31f413..98a5ade280d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java @@ -36,7 +36,7 @@ import org.apache.lucene.util.Bits; */ public final class SegmentReader extends AtomicReader { - private final SegmentInfo si; + private final SegmentInfoPerCommit si; private final Bits liveDocs; // Normally set to si.docCount - si.delDocCount, unless we @@ -50,19 +50,19 @@ public final class SegmentReader extends AtomicReader { * @throws CorruptIndexException if the index is corrupt * @throws IOException if there is a low-level IO error */ - public SegmentReader(SegmentInfo si, int termInfosIndexDivisor, IOContext context) throws IOException { + public SegmentReader(SegmentInfoPerCommit si, int termInfosIndexDivisor, IOContext context) throws IOException { this.si = si; - core = new SegmentCoreReaders(this, si.dir, si, context, termInfosIndexDivisor); + core = new SegmentCoreReaders(this, si.info.dir, si, context, termInfosIndexDivisor); boolean success = false; try { if (si.hasDeletions()) { // NOTE: the bitvector is stored using the regular directory, not cfs - liveDocs = si.getCodec().liveDocsFormat().readLiveDocs(directory(), si, new IOContext(IOContext.READ, true)); + liveDocs = si.info.getCodec().liveDocsFormat().readLiveDocs(directory(), si, new IOContext(IOContext.READ, true)); } else { assert si.getDelCount() == 0; liveDocs = null; } - numDocs = si.docCount - si.getDelCount(); + numDocs = si.info.docCount - si.getDelCount(); success = true; } finally { // With lock-less commits, it's entirely possible (and @@ -79,15 +79,17 @@ public final class 
SegmentReader extends AtomicReader { // Create new SegmentReader sharing core from a previous // SegmentReader and loading new live docs from a new // deletes file. Used by openIfChanged. - SegmentReader(SegmentInfo si, SegmentCoreReaders core, IOContext context) throws IOException { - this(si, core, si.getCodec().liveDocsFormat().readLiveDocs(si.dir, si, context), si.docCount - si.getDelCount()); + SegmentReader(SegmentInfoPerCommit si, SegmentCoreReaders core, IOContext context) throws IOException { + this(si, core, + si.info.getCodec().liveDocsFormat().readLiveDocs(si.info.dir, si, context), + si.info.docCount - si.getDelCount()); } // Create new SegmentReader sharing core from a previous // SegmentReader and using the provided in-memory // liveDocs. Used by IndexWriter to provide a new NRT // reader: - SegmentReader(SegmentInfo si, SegmentCoreReaders core, Bits liveDocs, int numDocs) throws IOException { + SegmentReader(SegmentInfoPerCommit si, SegmentCoreReaders core, Bits liveDocs, int numDocs) throws IOException { this.si = si; this.core = core; core.incRef(); @@ -151,7 +153,7 @@ public final class SegmentReader extends AtomicReader { @Override public int maxDoc() { // Don't call ensureOpen() here (it could affect performance) - return si.docCount; + return si.info.docCount; } /** @lucene.internal */ @@ -179,20 +181,20 @@ public final class SegmentReader extends AtomicReader { public String toString() { // SegmentInfo.toString takes dir and number of // *pending* deletions; so we reverse compute that here: - return si.toString(si.dir, si.docCount - numDocs - si.getDelCount()); + return si.toString(si.info.dir, si.info.docCount - numDocs - si.getDelCount()); } /** * Return the name of the segment this reader is reading. */ public String getSegmentName() { - return si.name; + return si.info.name; } /** - * Return the SegmentInfo of the segment this reader is reading. + * Return the SegmentInfoPerCommit of the segment this reader is reading. 
*/ - SegmentInfo getSegmentInfo() { + SegmentInfoPerCommit getSegmentInfo() { return si; } @@ -201,7 +203,7 @@ public final class SegmentReader extends AtomicReader { // Don't ensureOpen here -- in certain cases, when a // cloned/reopened reader needs to commit, it may call // this method on the closed original reader - return si.dir; + return si.info.dir; } // This is necessary so that cloned SegmentReaders (which diff --git a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java index 704a926e724..23511e2129e 100644 --- a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java @@ -89,8 +89,8 @@ final class StandardDirectoryReader extends DirectoryReader { IOException prior = null; boolean success = false; try { - final SegmentInfo info = infos.info(i); - assert info.dir == dir; + final SegmentInfoPerCommit info = infos.info(i); + assert info.info.dir == dir; final ReadersAndLiveDocs rld = writer.readerPool.get(info, true); try { final SegmentReader reader = rld.getReadOnlyClone(IOContext.READ); @@ -140,7 +140,7 @@ final class StandardDirectoryReader extends DirectoryReader { for (int i = infos.size() - 1; i>=0; i--) { // find SegmentReader for this segment - Integer oldReaderIndex = segmentReaders.get(infos.info(i).name); + Integer oldReaderIndex = segmentReaders.get(infos.info(i).info.name); if (oldReaderIndex == null) { // this is a new segment, no old SegmentReader can be reused newReaders[i] = null; @@ -153,7 +153,7 @@ final class StandardDirectoryReader extends DirectoryReader { IOException prior = null; try { SegmentReader newReader; - if (newReaders[i] == null || infos.info(i).getUseCompoundFile() != newReaders[i].getSegmentInfo().getUseCompoundFile()) { + if (newReaders[i] == null || infos.info(i).info.getUseCompoundFile() != newReaders[i].getSegmentInfo().info.getUseCompoundFile()) { // this is a new reader; in case we hit an exception we can close it safely newReader = new SegmentReader(infos.info(i), termInfosIndexDivisor, IOContext.READ); @@ -169,7 +169,7 @@ final class StandardDirectoryReader extends DirectoryReader { } else { readerShared[i] = false; // Steal the ref returned by SegmentReader ctor: - assert infos.info(i).dir == newReaders[i].getSegmentInfo().dir; + assert infos.info(i).info.dir == newReaders[i].getSegmentInfo().info.dir; assert infos.info(i).hasDeletions(); newReaders[i] = new SegmentReader(infos.info(i), newReaders[i].core, IOContext.READ); } diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java index 870bd726656..6a092071946 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java @@ -238,8 +238,8 @@ public class TieredMergePolicy extends MergePolicy { return noCFSRatio; } - private class SegmentByteSizeDescending implements Comparator { - public int compare(SegmentInfo o1, SegmentInfo o2) { + private class SegmentByteSizeDescending implements Comparator { + public int compare(SegmentInfoPerCommit o1, SegmentInfoPerCommit o2) { try { final long sz1 = size(o1); final long sz2 = size(o2); @@ -248,7 +248,7 @@ public class TieredMergePolicy extends MergePolicy { } else if (sz2 > sz1) { return 1; } else { - return o1.name.compareTo(o2.name); + return 
o1.info.name.compareTo(o2.info.name); } } catch (IOException ioe) { throw new RuntimeException(ioe); @@ -256,7 +256,7 @@ public class TieredMergePolicy extends MergePolicy { } } - private final Comparator segmentByteSizeDescending = new SegmentByteSizeDescending(); + private final Comparator segmentByteSizeDescending = new SegmentByteSizeDescending(); /** Holds score and explanation for a single candidate * merge. */ @@ -273,16 +273,16 @@ public class TieredMergePolicy extends MergePolicy { if (infos.size() == 0) { return null; } - final Collection merging = writer.get().getMergingSegments(); - final Collection toBeMerged = new HashSet(); + final Collection merging = writer.get().getMergingSegments(); + final Collection toBeMerged = new HashSet(); - final List infosSorted = new ArrayList(infos.asList()); + final List infosSorted = new ArrayList(infos.asList()); Collections.sort(infosSorted, segmentByteSizeDescending); // Compute total index bytes & print details about the index long totIndexBytes = 0; long minSegmentBytes = Long.MAX_VALUE; - for(SegmentInfo info : infosSorted) { + for(SegmentInfoPerCommit info : infosSorted) { final long segBytes = size(info); if (verbose()) { String extra = merging.contains(info) ? " [merging]" : ""; @@ -335,11 +335,11 @@ public class TieredMergePolicy extends MergePolicy { // Gather eligible segments for merging, ie segments // not already being merged and not already picked (by // prior iteration of this loop) for merging: - final List eligible = new ArrayList(); + final List eligible = new ArrayList(); for(int idx = tooBigCount; idx best = null; + List best = null; boolean bestTooLarge = false; long bestMergeBytes = 0; @@ -368,10 +368,10 @@ public class TieredMergePolicy extends MergePolicy { long totAfterMergeBytes = 0; - final List candidate = new ArrayList(); + final List candidate = new ArrayList(); boolean hitTooLarge = false; for(int idx = startIdx;idx maxMergedSegmentBytes) { @@ -410,7 +410,7 @@ public class TieredMergePolicy extends MergePolicy { } final OneMerge merge = new OneMerge(best); spec.add(merge); - for(SegmentInfo info : merge.segments) { + for(SegmentInfoPerCommit info : merge.segments) { toBeMerged.add(info); } @@ -427,15 +427,15 @@ public class TieredMergePolicy extends MergePolicy { } /** Expert: scores one merge; subclasses can override. 
*/ - protected MergeScore score(List candidate, boolean hitTooLarge, long mergingBytes) throws IOException { + protected MergeScore score(List candidate, boolean hitTooLarge, long mergingBytes) throws IOException { long totBeforeMergeBytes = 0; long totAfterMergeBytes = 0; long totAfterMergeBytesFloored = 0; - for(SegmentInfo info : candidate) { + for(SegmentInfoPerCommit info : candidate) { final long segBytes = size(info); totAfterMergeBytes += segBytes; totAfterMergeBytesFloored += floorSize(segBytes); - totBeforeMergeBytes += info.sizeInBytes(); + totBeforeMergeBytes += info.info.sizeInBytes(); } // Measure "skew" of the merge, which can range @@ -483,16 +483,16 @@ public class TieredMergePolicy extends MergePolicy { } @Override - public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount, Map segmentsToMerge) throws IOException { + public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount, Map segmentsToMerge) throws IOException { if (verbose()) { message("findForcedMerges maxSegmentCount=" + maxSegmentCount + " infos=" + writer.get().segString(infos) + " segmentsToMerge=" + segmentsToMerge); } - List eligible = new ArrayList(); + List eligible = new ArrayList(); boolean forceMergeRunning = false; - final Collection merging = writer.get().getMergingSegments(); + final Collection merging = writer.get().getMergingSegments(); boolean segmentIsOriginal = false; - for(SegmentInfo info : infos) { + for(SegmentInfoPerCommit info : infos) { final Boolean isOriginal = segmentsToMerge.get(info); if (isOriginal != null) { segmentIsOriginal = isOriginal; @@ -560,10 +560,10 @@ public class TieredMergePolicy extends MergePolicy { if (verbose()) { message("findForcedDeletesMerges infos=" + writer.get().segString(infos) + " forceMergeDeletesPctAllowed=" + forceMergeDeletesPctAllowed); } - final List eligible = new ArrayList(); - final Collection merging = writer.get().getMergingSegments(); - for(SegmentInfo info : infos) { - double pctDeletes = 100.*((double) writer.get().numDeletedDocs(info))/info.docCount; + final List eligible = new ArrayList(); + final Collection merging = writer.get().getMergingSegments(); + for(SegmentInfoPerCommit info : infos) { + double pctDeletes = 100.*((double) writer.get().numDeletedDocs(info))/info.info.docCount; if (pctDeletes > forceMergeDeletesPctAllowed && !merging.contains(info)) { eligible.add(info); } @@ -603,7 +603,7 @@ public class TieredMergePolicy extends MergePolicy { } @Override - public boolean useCompoundFile(SegmentInfos infos, SegmentInfo mergedInfo) throws IOException { + public boolean useCompoundFile(SegmentInfos infos, SegmentInfoPerCommit mergedInfo) throws IOException { final boolean doCFS; if (!useCompoundFile) { @@ -612,8 +612,9 @@ public class TieredMergePolicy extends MergePolicy { doCFS = true; } else { long totalSize = 0; - for (SegmentInfo info : infos) + for (SegmentInfoPerCommit info : infos) { totalSize += size(info); + } doCFS = size(mergedInfo) <= noCFSRatio * totalSize; } @@ -624,22 +625,22 @@ public class TieredMergePolicy extends MergePolicy { public void close() { } - private boolean isMerged(SegmentInfo info) + private boolean isMerged(SegmentInfoPerCommit info) throws IOException { IndexWriter w = writer.get(); assert w != null; boolean hasDeletions = w.numDeletedDocs(info) > 0; return !hasDeletions && - !info.hasSeparateNorms() && - info.dir == w.getDirectory() && - (info.getUseCompoundFile() == useCompoundFile || noCFSRatio < 1.0); + !info.info.hasSeparateNorms() && + 
info.info.dir == w.getDirectory() && + (info.info.getUseCompoundFile() == useCompoundFile || noCFSRatio < 1.0); } // Segment size in bytes, pro-rated by % deleted - private long size(SegmentInfo info) throws IOException { - final long byteSize = info.sizeInBytes(); + private long size(SegmentInfoPerCommit info) throws IOException { + final long byteSize = info.info.sizeInBytes(); final int delCount = writer.get().numDeletedDocs(info); - final double delRatio = (info.docCount <= 0 ? 0.0f : ((double)delCount / (double)info.docCount)); + final double delRatio = (info.info.docCount <= 0 ? 0.0f : ((double)delCount / (double)info.info.docCount)); assert delRatio <= 1.0; return (long) (byteSize * (1.0-delRatio)); } diff --git a/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java index 51fdb2036a5..f16ce5f9538 100644 --- a/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java @@ -63,8 +63,8 @@ public class UpgradeIndexMergePolicy extends MergePolicy { * so all segments created with a different version number than this Lucene version will * get upgraded. */ - protected boolean shouldUpgradeSegment(SegmentInfo si) { - return !Constants.LUCENE_MAIN_VERSION.equals(si.getVersion()); + protected boolean shouldUpgradeSegment(SegmentInfoPerCommit si) { + return !Constants.LUCENE_MAIN_VERSION.equals(si.info.getVersion()); } @Override @@ -79,10 +79,10 @@ public class UpgradeIndexMergePolicy extends MergePolicy { } @Override - public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map segmentsToMerge) throws CorruptIndexException, IOException { + public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map segmentsToMerge) throws CorruptIndexException, IOException { // first find all old segments - final Map oldSegments = new HashMap(); - for (final SegmentInfo si : segmentInfos) { + final Map oldSegments = new HashMap(); + for (final SegmentInfoPerCommit si : segmentInfos) { final Boolean v = segmentsToMerge.get(si); if (v != null && shouldUpgradeSegment(si)) { oldSegments.put(si, v); @@ -112,8 +112,8 @@ public class UpgradeIndexMergePolicy extends MergePolicy { message("findForcedMerges: " + base.getClass().getSimpleName() + " does not want to merge all old segments, merge remaining ones into new segment: " + oldSegments); } - final List newInfos = new ArrayList(); - for (final SegmentInfo si : segmentInfos) { + final List newInfos = new ArrayList(); + for (final SegmentInfoPerCommit si : segmentInfos) { if (oldSegments.containsKey(si)) { newInfos.add(si); } @@ -134,7 +134,7 @@ public class UpgradeIndexMergePolicy extends MergePolicy { } @Override - public boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) throws IOException { + public boolean useCompoundFile(SegmentInfos segments, SegmentInfoPerCommit newSegment) throws IOException { return base.useCompoundFile(segments, newSegment); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java index d33a4e923db..aa2fb62a372 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java @@ -682,8 +682,8 @@ public class TestBackwardsCompatibility extends 
LuceneTestCase { if (VERBOSE) { System.out.println("checkAllSegmentsUpgraded: " + infos); } - for (SegmentInfo si : infos) { - assertEquals(Constants.LUCENE_MAIN_VERSION, si.getVersion()); + for (SegmentInfoPerCommit si : infos) { + assertEquals(Constants.LUCENE_MAIN_VERSION, si.info.getVersion()); } return infos.size(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java index 7c9d404ae56..2cc6df5b89e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java @@ -257,7 +257,7 @@ public class TestCodecs extends LuceneTestCase { this.write(fieldInfos, dir, fields, true); Codec codec = Codec.getDefault(); final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, -1, SEGMENT, false, null, false, - 0, codec, null); + codec, null); final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR)); @@ -314,7 +314,7 @@ public class TestCodecs extends LuceneTestCase { this.write(fieldInfos, dir, fields, false); Codec codec = Codec.getDefault(); final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, -1, - SEGMENT, false, null, false, 0, + SEGMENT, false, null, false, codec, null); if (VERBOSE) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java b/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java index b14d100dc9d..43b20003f4e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java @@ -69,8 +69,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { sis.read(dir); assertEquals(2, sis.size()); - FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0)); - FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1)); + FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info); + FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info); assertEquals("f1", fis1.fieldInfo(0).name); assertEquals("f2", fis1.fieldInfo(1).name); @@ -87,7 +87,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { sis.read(dir); assertEquals(1, sis.size()); - FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(0)); + FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(0).info); assertEquals("f1", fis3.fieldInfo(0).name); assertEquals("f2", fis3.fieldInfo(1).name); @@ -132,8 +132,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { sis.read(dir1); assertEquals(2, sis.size()); - FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0)); - FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1)); + FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info); + FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info); assertEquals("f1", fis1.fieldInfo(0).name); assertEquals("f2", fis1.fieldInfo(1).name); @@ -163,7 +163,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { SegmentInfos sis = new SegmentInfos(); sis.read(dir); assertEquals(1, sis.size()); - FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0)); + FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info); assertEquals("f1", fis1.fieldInfo(0).name); assertEquals("f2", fis1.fieldInfo(1).name); } @@ -182,8 +182,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { 
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java b/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
index b14d100dc9d..43b20003f4e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
@@ -69,8 +69,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       sis.read(dir);
       assertEquals(2, sis.size());
 
-      FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0));
-      FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1));
+      FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
+      FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
 
       assertEquals("f1", fis1.fieldInfo(0).name);
       assertEquals("f2", fis1.fieldInfo(1).name);
@@ -87,7 +87,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       sis.read(dir);
       assertEquals(1, sis.size());
 
-      FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(0));
+      FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(0).info);
 
       assertEquals("f1", fis3.fieldInfo(0).name);
       assertEquals("f2", fis3.fieldInfo(1).name);
@@ -132,8 +132,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       sis.read(dir1);
       assertEquals(2, sis.size());
 
-      FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0));
-      FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1));
+      FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
+      FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
 
       assertEquals("f1", fis1.fieldInfo(0).name);
       assertEquals("f2", fis1.fieldInfo(1).name);
@@ -163,7 +163,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     SegmentInfos sis = new SegmentInfos();
     sis.read(dir);
     assertEquals(1, sis.size());
-    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0));
+    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
     assertEquals("f1", fis1.fieldInfo(0).name);
     assertEquals("f2", fis1.fieldInfo(1).name);
   }
@@ -182,8 +182,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     SegmentInfos sis = new SegmentInfos();
     sis.read(dir);
     assertEquals(2, sis.size());
-    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0));
-    FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1));
+    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
+    FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
     assertEquals("f1", fis1.fieldInfo(0).name);
     assertEquals("f2", fis1.fieldInfo(1).name);
     assertEquals("f1", fis2.fieldInfo(0).name);
@@ -205,9 +205,9 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     SegmentInfos sis = new SegmentInfos();
     sis.read(dir);
     assertEquals(3, sis.size());
-    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0));
-    FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1));
-    FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(2));
+    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
+    FieldInfos fis2 = _TestUtil.getFieldInfos(sis.info(1).info);
+    FieldInfos fis3 = _TestUtil.getFieldInfos(sis.info(2).info);
     assertEquals("f1", fis1.fieldInfo(0).name);
     assertEquals("f2", fis1.fieldInfo(1).name);
     assertEquals("f1", fis2.fieldInfo(0).name);
@@ -239,7 +239,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     SegmentInfos sis = new SegmentInfos();
     sis.read(dir);
     assertEquals(1, sis.size());
-    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0));
+    FieldInfos fis1 = _TestUtil.getFieldInfos(sis.info(0).info);
     assertEquals("f1", fis1.fieldInfo(0).name);
     assertEquals("f2", fis1.fieldInfo(1).name);
     assertEquals("f3", fis1.fieldInfo(2).name);
@@ -276,8 +276,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     SegmentInfos sis = new SegmentInfos();
     sis.read(dir);
 
-    for (SegmentInfo si : sis) {
-      FieldInfos fis = _TestUtil.getFieldInfos(si);
+    for (SegmentInfoPerCommit si : sis) {
+      FieldInfos fis = _TestUtil.getFieldInfos(si.info);
 
       for (FieldInfo fi : fis) {
         Field expected = getField(Integer.parseInt(fi.name));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
index d30f4a8925b..3f8580a787c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
@@ -118,20 +118,20 @@ public class TestDoc extends LuceneTestCase {
         setMergePolicy(newLogMergePolicy(10))
     );
 
-    SegmentInfo si1 = indexDoc(writer, "test.txt");
+    SegmentInfoPerCommit si1 = indexDoc(writer, "test.txt");
     printSegment(out, si1);
 
-    SegmentInfo si2 = indexDoc(writer, "test2.txt");
+    SegmentInfoPerCommit si2 = indexDoc(writer, "test2.txt");
     printSegment(out, si2);
     writer.close();
 
-    SegmentInfo siMerge = merge(directory, si1, si2, "merge", false);
+    SegmentInfoPerCommit siMerge = merge(directory, si1, si2, "merge", false);
     printSegment(out, siMerge);
 
-    SegmentInfo siMerge2 = merge(directory, si1, si2, "merge2", false);
+    SegmentInfoPerCommit siMerge2 = merge(directory, si1, si2, "merge2", false);
     printSegment(out, siMerge2);
 
-    SegmentInfo siMerge3 = merge(directory, siMerge, siMerge2, "merge3", false);
+    SegmentInfoPerCommit siMerge3 = merge(directory, siMerge, siMerge2, "merge3", false);
     printSegment(out, siMerge3);
 
     directory.close();
@@ -177,7 +177,7 @@ public class TestDoc extends LuceneTestCase {
     assertEquals(multiFileOutput, singleFileOutput);
   }
 
-  private SegmentInfo indexDoc(IndexWriter writer, String fileName)
+  private SegmentInfoPerCommit indexDoc(IndexWriter writer, String fileName)
     throws Exception {
     File file = new File(workDir, fileName);
@@ -189,14 +189,14 @@ public class TestDoc extends LuceneTestCase {
   }
 
-  private SegmentInfo merge(Directory dir, SegmentInfo si1, SegmentInfo si2, String merged, boolean useCompoundFile)
+  private SegmentInfoPerCommit merge(Directory dir, SegmentInfoPerCommit si1, SegmentInfoPerCommit si2, String merged, boolean useCompoundFile)
     throws Exception {
     IOContext context = newIOContext(random());
     SegmentReader r1 = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
     SegmentReader r2 = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
 
     final Codec codec = Codec.getDefault();
-    TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.dir);
+    TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
     SegmentMerger merger = new SegmentMerger(InfoStream.getDefault(), trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, merged, MergeState.CheckAbort.NONE, null, new FieldInfos.Builder(), codec, context);
 
     merger.add(r1);
@@ -204,24 +204,24 @@ public class TestDoc extends LuceneTestCase {
     MergeState mergeState = merger.merge();
     r1.close();
     r2.close();
-    final SegmentInfo info = new SegmentInfo(si1.dir, Constants.LUCENE_MAIN_VERSION, merged,
-                                             si1.docCount + si2.docCount, -1, merged,
-                                             false, null, false, 0, codec, null);
+    final SegmentInfo info = new SegmentInfo(si1.info.dir, Constants.LUCENE_MAIN_VERSION, merged,
+                                             si1.info.docCount + si2.info.docCount, -1, merged,
+                                             false, null, false, codec, null);
     info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles()));
 
     if (useCompoundFile) {
       Collection<String> filesToDelete = IndexWriter.createCompoundFile(InfoStream.getDefault(), dir, MergeState.CheckAbort.NONE, info, newIOContext(random()));
       info.setUseCompoundFile(true);
       for (final String fileToDelete : filesToDelete) {
-        si1.dir.deleteFile(fileToDelete);
+        si1.info.dir.deleteFile(fileToDelete);
       }
     }
 
-    return info;
+    return new SegmentInfoPerCommit(info, 0, -1L);
   }
 
-  private void printSegment(PrintWriter out, SegmentInfo si)
+  private void printSegment(PrintWriter out, SegmentInfoPerCommit si)
   throws Exception {
     SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
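Note (illustration, not part of the patch): merge() above shows the idiom for newly written segments: SegmentMerger produces a bare SegmentInfo, and the caller wraps it before any reader touches it. The delCount of 0 and delGen of -1L appear to encode "no deletions, no deletes file yet". Distilled, reusing the hunk's own local names (dir, merged, codec, trackingDir, context):

    // A freshly merged segment carries no per-commit deletion state.
    SegmentInfo info = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, merged,
                                       docCount, -1, merged, false, null, false, codec, null);
    info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles()));
    SegmentInfoPerCommit commit = new SegmentInfoPerCommit(info, 0, -1L); // 0 dels, no del gen yet
    SegmentReader reader = new SegmentReader(commit, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);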
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
index 64607b60a69..eab5a6384ea 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
@@ -62,7 +62,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.addDocument(testDoc);
     writer.commit();
-    SegmentInfo info = writer.newestSegment();
+    SegmentInfoPerCommit info = writer.newestSegment();
     writer.close();
     //After adding the document, we should be able to read it back in
     SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
@@ -124,7 +124,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.addDocument(doc);
     writer.commit();
-    SegmentInfo info = writer.newestSegment();
+    SegmentInfoPerCommit info = writer.newestSegment();
     writer.close();
 
     SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
@@ -196,7 +196,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.addDocument(doc);
     writer.commit();
-    SegmentInfo info = writer.newestSegment();
+    SegmentInfoPerCommit info = writer.newestSegment();
     writer.close();
 
     SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
@@ -240,7 +240,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.addDocument(doc);
     writer.commit();
-    SegmentInfo info = writer.newestSegment();
+    SegmentInfoPerCommit info = writer.newestSegment();
     writer.close();
 
     SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 63d26126ed6..86f292c2c2b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -1526,7 +1526,7 @@ public class TestIndexWriter extends LuceneTestCase {
     assertNoUnreferencedFiles(dir, "no tv files");
     DirectoryReader r0 = IndexReader.open(dir);
     for (IndexReader r : r0.getSequentialSubReaders()) {
-      SegmentInfo s = ((SegmentReader) r).getSegmentInfo();
+      SegmentInfoPerCommit s = ((SegmentReader) r).getSegmentInfo();
       assertFalse(((SegmentReader) r).getFieldInfos().hasVectors());
       // nocommit
       /*
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMerging.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
index bc8aa4edba8..261a75dad7a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
@@ -321,7 +321,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
         break;
       }
      for(int i=0;i[...]
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
[...]
-        int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
+        int maxSegmentCount, Map<SegmentInfoPerCommit,Boolean> segmentsToMerge)
       throws CorruptIndexException, IOException {
       return null;
     }
@@ -282,7 +282,7 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
     }
 
     @Override
-    public boolean useCompoundFile(SegmentInfos segments, SegmentInfo newSegment) {
+    public boolean useCompoundFile(SegmentInfos segments, SegmentInfoPerCommit newSegment) {
       return useCompoundFile;
     }
   }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
index 5c21f801577..a0b2f2b27fb 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
@@ -50,9 +50,9 @@ public class TestSegmentMerger extends LuceneTestCase {
     merge1Dir = newDirectory();
     merge2Dir = newDirectory();
     DocHelper.setupDoc(doc1);
-    SegmentInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
+    SegmentInfoPerCommit info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
     DocHelper.setupDoc(doc2);
-    SegmentInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
+    SegmentInfoPerCommit info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
     reader1 = new SegmentReader(info1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
     reader2 = new SegmentReader(info2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
   }
@@ -84,8 +84,10 @@ public class TestSegmentMerger extends LuceneTestCase {
     int docsMerged = mergeState.mergedDocCount;
     assertTrue(docsMerged == 2);
     //Should be able to open a new SegmentReader against the new directory
-    SegmentReader mergedReader = new SegmentReader(new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, docsMerged, -1, mergedSegment,
-                                                                   false, null, false, 0, codec, null),
+    SegmentReader mergedReader = new SegmentReader(new SegmentInfoPerCommit(
+                                                     new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, docsMerged, -1, mergedSegment,
+                                                                     false, null, false, codec, null),
+                                                     0, -1L),
                                                    DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
index 8577e93a5dc..31c77b6e925 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
@@ -41,7 +41,7 @@ public class TestSegmentReader extends LuceneTestCase {
     super.setUp();
     dir = newDirectory();
     DocHelper.setupDoc(testDoc);
-    SegmentInfo info = DocHelper.writeDoc(random(), dir, testDoc);
+    SegmentInfoPerCommit info = DocHelper.writeDoc(random(), dir, testDoc);
     reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, IOContext.READ);
   }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
index 4d5d9fb5d8e..27f4bc33cb1 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
@@ -31,7 +31,7 @@ import org.apache.lucene.util._TestUtil;
 public class TestSegmentTermDocs extends LuceneTestCase {
   private Document testDoc = new Document();
   private Directory dir;
-  private SegmentInfo info;
+  private SegmentInfoPerCommit info;
 
   @Override
   public void setUp() throws Exception {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java
index 5fed32b4fc9..b2706c0f109 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java
@@ -47,7 +47,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
   private String[] testTerms = {"this", "is", "a", "test"};
   private int[][] positions = new int[testTerms.length][];
   private Directory dir;
-  private SegmentInfo seg;
+  private SegmentInfoPerCommit seg;
   private FieldInfos fieldInfos = new FieldInfos(new FieldInfo[0]);
   private static int TERM_FREQ = 3;
@@ -128,7 +128,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
     seg = writer.newestSegment();
     writer.close();
 
-    fieldInfos = _TestUtil.getFieldInfos(seg);
+    fieldInfos = _TestUtil.getFieldInfos(seg.info);
   }
 
   @Override
@@ -187,7 +187,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
     //Check to see the files were created properly in setup
     DirectoryReader reader = IndexReader.open(dir);
     for (IndexReader r : reader.getSequentialSubReaders()) {
-      SegmentInfo s = ((SegmentReader) r).getSegmentInfo();
+      SegmentInfoPerCommit s = ((SegmentReader) r).getSegmentInfo();
       assertTrue(((SegmentReader) r).getFieldInfos().hasVectors());
       // nocommit
@@ -204,7 +204,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
   }
 
   public void testReader() throws IOException {
-    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg, fieldInfos, newIOContext(random()));
+    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
     for (int j = 0; j < 5; j++) {
       Terms vector = reader.get(j).terms(testFields[0]);
       assertNotNull(vector);
@@ -223,7 +223,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
   }
 
   public void testDocsEnum() throws IOException {
-    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg, fieldInfos, newIOContext(random()));
+    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
     for (int j = 0; j < 5; j++) {
       Terms vector = reader.get(j).terms(testFields[0]);
       assertNotNull(vector);
@@ -250,7 +250,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
   }
 
   public void testPositionReader() throws IOException {
-    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg, fieldInfos, newIOContext(random()));
+    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
     BytesRef[] terms;
     Terms vector = reader.get(0).terms(testFields[0]);
     assertNotNull(vector);
@@ -305,7 +305,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
   }
 
   public void testOffsetReader() throws IOException {
-    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg, fieldInfos, newIOContext(random()));
+    TermVectorsReader reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
     Terms vector = reader.get(0).terms(testFields[0]);
     assertNotNull(vector);
     TermsEnum termsEnum = vector.iterator(null);
@@ -347,7 +347,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
   public void testBadParams() throws IOException {
     TermVectorsReader reader = null;
     try {
-      reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg, fieldInfos, newIOContext(random()));
+      reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
       //Bad document number, good field number
       reader.get(50);
       fail();
@@ -356,7 +356,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
     } finally {
       reader.close();
     }
-    reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg, fieldInfos, newIOContext(random()));
+    reader = Codec.getDefault().termVectorsFormat().vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
     //good document number, bad field
     Terms vector = reader.get(0).terms("f50");
     assertNull(vector);
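Note (illustration, not part of the patch): the TestTermVectorsReader hunks above pass seg.info rather than seg to vectorsReader, which suggests where this patch draws the line: codec formats that read per-segment files take the bare SegmentInfo, while components that must see deletions, such as SegmentReader, take the wrapper. Side by side, with the test's own names (seg, dir, fieldInfos):

    // Per-segment consumer: unwrap to the SegmentInfo.
    TermVectorsReader tvr = Codec.getDefault().termVectorsFormat()
        .vectorsReader(dir, seg.info, fieldInfos, newIOContext(random()));
    // Per-commit consumer: keep the SegmentInfoPerCommit.
    SegmentReader sr = new SegmentReader(seg, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));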
diff --git a/lucene/misc/src/java/org/apache/lucene/index/BalancedSegmentMergePolicy.java b/lucene/misc/src/java/org/apache/lucene/index/BalancedSegmentMergePolicy.java
index ea36b56df06..be1f22b4e7b 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/BalancedSegmentMergePolicy.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/BalancedSegmentMergePolicy.java
@@ -57,10 +57,10 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
   }
 
   @Override
-  protected long size(SegmentInfo info) throws IOException {
+  protected long size(SegmentInfoPerCommit info) throws IOException {
     long byteSize = info.sizeInBytes();
-    float delRatio = (info.docCount <= 0 ? 0.0f : ((float)info.getDelCount() / (float)info.docCount));
-    return (info.docCount <= 0 ? byteSize : (long)((1.0f - delRatio) * byteSize));
+    float delRatio = (info.info.docCount <= 0 ? 0.0f : ((float)info.getDelCount() / (float)info.info.docCount));
+    return (info.info.docCount <= 0 ? byteSize : (long)((1.0f - delRatio) * byteSize));
   }
 
   public void setPartialExpunge(boolean doPartialExpunge) {
@@ -106,7 +106,7 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
   }
 
   @Override
-  public MergeSpecification findForcedMerges(SegmentInfos infos, int maxNumSegments, Map<SegmentInfo,Boolean> segmentsToMerge) throws IOException {
+  public MergeSpecification findForcedMerges(SegmentInfos infos, int maxNumSegments, Map<SegmentInfoPerCommit,Boolean> segmentsToMerge) throws IOException {
 
     assert maxNumSegments > 0;
@@ -120,7 +120,7 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
       int last = infos.size();
       while(last > 0) {
-        final SegmentInfo info = infos.info(--last);
+        final SegmentInfoPerCommit info = infos.info(--last);
         if (segmentsToMerge.containsKey(info)) {
           last++;
           break;
@@ -196,7 +196,7 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
         spec.add(new OneMerge(infos.asList().subList(mergeStart, mergeEnd)));
       } else {
         if(partialExpunge) {
-          SegmentInfo info = infos.info(mergeStart);
+          SegmentInfoPerCommit info = infos.info(mergeStart);
           int delCount = info.getDelCount();
           if(delCount > maxDelCount) {
             expungeCandidate = mergeStart;
@@ -260,8 +260,8 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
       if(spec == null) spec = new MergeSpecification();
 
       for(int i = 0; i < numLargeSegs; i++) {
-        SegmentInfo info = infos.info(i);
-        if(info.hasDeletions()) {
+        SegmentInfoPerCommit info = infos.info(i);
+        if (info.hasDeletions()) {
           spec.add(new OneMerge(Collections.singletonList(infos.info(i))));
         }
       }
@@ -279,7 +279,7 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
     long totalLargeSegSize = 0;
     long totalSmallSegSize = 0;
-    SegmentInfo info;
+    SegmentInfoPerCommit info;
 
     // compute the total size of large segments
     for(int i = 0; i < numLargeSegs; i++) {
@@ -340,7 +340,7 @@ public class BalancedSegmentMergePolicy extends LogByteSizeMergePolicy {
     int maxDelCount = 0;
     for(int i = maxNumSegments - 1; i >= 0; i--) {
-      SegmentInfo info = infos.info(i);
+      SegmentInfoPerCommit info = infos.info(i);
       int delCount = info.getDelCount();
       if (delCount > maxDelCount) {
         expungeCandidate = i;
diff --git a/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java b/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
index 9cc3cac0aec..8a5956dadde 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/IndexSplitter.java
@@ -105,23 +105,23 @@ public class IndexSplitter {
   public void listSegments() throws IOException {
     DecimalFormat formatter = new DecimalFormat("###,###.###");
     for (int x = 0; x < infos.size(); x++) {
-      SegmentInfo info = infos.info(x);
+      SegmentInfoPerCommit info = infos.info(x);
       String sizeStr = formatter.format(info.sizeInBytes());
-      System.out.println(info.name + " " + sizeStr);
+      System.out.println(info.info.name + " " + sizeStr);
     }
   }
 
   private int getIdx(String name) {
     for (int x = 0; x < infos.size(); x++) {
-      if (name.equals(infos.info(x).name))
+      if (name.equals(infos.info(x).info.name))
         return x;
     }
     return -1;
   }
 
-  private SegmentInfo getInfo(String name) {
+  private SegmentInfoPerCommit getInfo(String name) {
     for (int x = 0; x < infos.size(); x++) {
-      if (name.equals(infos.info(x).name))
+      if (name.equals(infos.info(x).info.name))
         return infos.info(x);
     }
     return null;
@@ -142,14 +142,16 @@ public class IndexSplitter {
     SegmentInfos destInfos = new SegmentInfos();
     destInfos.counter = infos.counter;
     for (String n : segs) {
-      SegmentInfo info = getInfo(n);
+      SegmentInfoPerCommit infoPerCommit = getInfo(n);
+      SegmentInfo info = infoPerCommit.info;
       // Same info just changing the dir:
       SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.name, info.docCount, info.getDocStoreOffset(),
                                             info.getDocStoreSegment(), info.getDocStoreIsCompoundFile(), info.getNormGen(), info.getUseCompoundFile(),
-                                            info.getDelCount(), info.getCodec(), info.getDiagnostics());
-      destInfos.add(newInfo);
+                                            info.getCodec(), info.getDiagnostics());
+      destInfos.add(new SegmentInfoPerCommit(newInfo, infoPerCommit.getDelCount(), infoPerCommit.getDelGen()));
+      // nocommit is this right...?
       // now copy files over
-      Collection<String> files = info.files();
+      Collection<String> files = infoPerCommit.files();
       for (final String srcName : files) {
         File srcFile = new File(dir, srcName);
         File destFile = new File(destDir, srcName);
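Note (illustration, not part of the patch): the IndexSplitter hunk above is the one place a SegmentInfoPerCommit is rebuilt rather than created fresh: the SegmentInfo is cloned into the destination directory, then re-wrapped with the source's delCount and delGen so the copied segment keeps its deletions (the nocommit marks that as an open question). The shape of the operation, distilled with the hunk's locals (n, destFSDir, destInfos):

    // Copy the per-segment descriptor, carry over the per-commit state.
    SegmentInfoPerCommit src = getInfo(n);
    SegmentInfo copied = new SegmentInfo(destFSDir, src.info.getVersion(), src.info.name,
        src.info.docCount, src.info.getDocStoreOffset(), src.info.getDocStoreSegment(),
        src.info.getDocStoreIsCompoundFile(), src.info.getNormGen(), src.info.getUseCompoundFile(),
        src.info.getCodec(), src.info.getDiagnostics());
    destInfos.add(new SegmentInfoPerCommit(copied, src.getDelCount(), src.getDelGen()));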
diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java b/lucene/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
index 3bba9ac37b5..d711756247f 100644
--- a/lucene/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
+++ b/lucene/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
@@ -64,7 +64,7 @@ public class TestIndexSplitter extends LuceneTestCase {
     iw.close();
     // we should have 2 segments now
     IndexSplitter is = new IndexSplitter(dir);
-    String splitSegName = is.infos.info(1).name;
+    String splitSegName = is.infos.info(1).info.name;
     is.split(destDir, new String[] {splitSegName});
     Directory fsDirDest = newFSDirectory(destDir);
     DirectoryReader r = DirectoryReader.open(fsDirDest);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java
index ad8ef0ebfff..fa205503fb4 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java
@@ -52,7 +52,7 @@ public class AlcoholicMergePolicy extends LogMergePolicy {
 
   @Override
   //@BlackMagic(level=Voodoo);
-  protected long size(SegmentInfo info) throws IOException {
+  protected long size(SegmentInfoPerCommit info) throws IOException {
     int hourOfDay = calendar.get(Calendar.HOUR_OF_DAY);
     if (hourOfDay < 6 ||
         hourOfDay > 20 ||
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/DocHelper.java b/lucene/test-framework/src/java/org/apache/lucene/index/DocHelper.java
index f7f10ac28d3..ced221d29e8 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/DocHelper.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/DocHelper.java
@@ -261,7 +261,7 @@ class DocHelper {
    * @param doc
    * @throws IOException
    */
-  public static SegmentInfo writeDoc(Random random, Directory dir, Document doc) throws IOException
+  public static SegmentInfoPerCommit writeDoc(Random random, Directory dir, Document doc) throws IOException
   {
     return writeDoc(random, dir, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), null, doc);
   }
@@ -276,13 +276,13 @@ class DocHelper {
    * @param doc
    * @throws IOException
    */
-  public static SegmentInfo writeDoc(Random random, Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException {
+  public static SegmentInfoPerCommit writeDoc(Random random, Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException {
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( /* LuceneTestCase.newIndexWriterConfig(random, */
         TEST_VERSION_CURRENT, analyzer).setSimilarity(similarity));
     //writer.setUseCompoundFile(false);
     writer.addDocument(doc);
     writer.commit();
-    SegmentInfo info = writer.newestSegment();
+    SegmentInfoPerCommit info = writer.newestSegment();
     writer.close();
     return info;
   }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
index 74dc06c3256..fdfc224efda 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
@@ -45,7 +45,7 @@ public class MockRandomMergePolicy extends MergePolicy {
 
     if (segmentInfos.size() > 1 && random.nextInt(5) == 3) {
 
-      List<SegmentInfo> segments = new ArrayList<SegmentInfo>(segmentInfos.asList());
+      List<SegmentInfoPerCommit> segments = new ArrayList<SegmentInfoPerCommit>(segmentInfos.asList());
       Collections.shuffle(segments, random);
 
       // TODO: sometimes make more than 1 merge?
@@ -59,11 +59,11 @@ public class MockRandomMergePolicy extends MergePolicy {
 
   @Override
   public MergeSpecification findForcedMerges(
-      SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
+      SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfoPerCommit,Boolean> segmentsToMerge)
     throws CorruptIndexException, IOException {
 
-    final List<SegmentInfo> eligibleSegments = new ArrayList<SegmentInfo>();
-    for(SegmentInfo info : segmentInfos) {
+    final List<SegmentInfoPerCommit> eligibleSegments = new ArrayList<SegmentInfoPerCommit>();
+    for(SegmentInfoPerCommit info : segmentInfos) {
       if (segmentsToMerge.containsKey(info)) {
         eligibleSegments.add(info);
       }
@@ -87,7 +87,7 @@ public class MockRandomMergePolicy extends MergePolicy {
 
     if (mergeSpec != null) {
       for(OneMerge merge : mergeSpec.merges) {
-        for(SegmentInfo info : merge.segments) {
+        for(SegmentInfoPerCommit info : merge.segments) {
           assert segmentsToMerge.containsKey(info);
         }
       }
@@ -107,7 +107,7 @@ public class MockRandomMergePolicy extends MergePolicy {
   }
 
   @Override
-  public boolean useCompoundFile(SegmentInfos infos, SegmentInfo mergedInfo) throws IOException {
+  public boolean useCompoundFile(SegmentInfos infos, SegmentInfoPerCommit mergedInfo) throws IOException {
     // 80% of the time we create CFS:
     return random.nextInt(5) != 1;
   }
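Note (illustration, not part of the patch): the closing MockRandomMergePolicy hunk keeps its randomized compound-file choice under the new signature; the 80% in the comment follows from nextInt(5) drawing uniformly from {0, 1, 2, 3, 4}, of which four values satisfy != 1:

    boolean cfs = random.nextInt(5) != 1;   // true with probability 4/5 = 80%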