mirror of https://github.com/apache/lucene.git
LUCENE-10032: Remove leafDocMaps from MergeState (#222)
These maps are no longer useful after LUCENE-8505.
This commit is contained in:
parent 0e6c3146d7
commit ba417b593f
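For context, here is a minimal, self-contained sketch of the remapping this change simplifies. It is illustrative only, not Lucene code: the DocMapSketch interface, the lambdas, and the +100 offset are invented for the example, while the variable names mirror segDocMap and segLeafDocMap from the diff below. Before this commit, IndexWriter composed two maps to carry a doc ID from a merging segment into the merged segment; per the commit message, after LUCENE-8505 the per-segment docMaps already map a reader's doc IDs straight to merged doc IDs, so the per-leaf indirection is redundant.

// Sketch only -- stand-ins for MergeState.DocMap and the maps used in IndexWriter.
interface DocMapSketch {
  int get(int docID); // doc ID in the merged segment, or -1 if the doc was deleted
}

class RemapSketch {
  public static void main(String[] args) {
    // Per-leaf map: before this change it defaulted to identity (see the removed
    // "Default to identity" block in MergeState.maybeSortReaders below).
    DocMapSketch segLeafDocMap = docID -> docID;
    // Per-segment map built by MergeState: old doc ID -> merged doc ID
    // (a fixed +100 offset here stands in for the real remapping).
    DocMapSketch segDocMap = docID -> docID + 100;

    int doc = 7;
    // Before: two lookups, composing the per-leaf map with the merge doc map.
    int before = segDocMap.get(segLeafDocMap.get(doc));
    // After: docMaps[i] already goes from the reader's doc IDs to merged doc IDs,
    // so a single lookup suffices.
    int after = segDocMap.get(doc);
    System.out.println(before + " == " + after); // prints "107 == 107"
  }
}

The diff below applies exactly this simplification everywhere IndexWriter remaps deletes and doc-values updates that arrived while a merge was running, and removes the now-unused leafDocMaps field from MergeState.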
lucene/core/src/java/org/apache/lucene/index/IndexWriter.java

@@ -4138,7 +4138,6 @@ public class IndexWriter
       assert rld != null : "seg=" + info.info.name;

       MergeState.DocMap segDocMap = mergeState.docMaps[i];
-      MergeState.DocMap segLeafDocMap = mergeState.leafDocMaps[i];

       carryOverHardDeletes(
           mergedDeletesAndUpdates,
@@ -4146,8 +4145,7 @@ public class IndexWriter
           mergeState.liveDocs[i],
           merge.getMergeReader().get(i).hardLiveDocs,
           rld.getHardLiveDocs(),
-          segDocMap,
-          segLeafDocMap);
+          segDocMap);

       // Now carry over all doc values updates that were resolved while we were merging, remapping
       // the docIDs to the newly merged docIDs.
@@ -4200,7 +4198,7 @@ public class IndexWriter
         DocValuesFieldUpdates.Iterator it = updates.iterator();
         int doc;
         while ((doc = it.nextDoc()) != NO_MORE_DOCS) {
-          int mappedDoc = segDocMap.get(segLeafDocMap.get(doc));
+          int mappedDoc = segDocMap.get(doc);
           if (mappedDoc != -1) {
             if (it.hasValue()) {
               // not deleted
@@ -4250,8 +4248,7 @@ public class IndexWriter
       Bits mergeLiveDocs, // the liveDocs used to build the segDocMaps
       Bits prevHardLiveDocs, // the hard deletes when the merge reader was pulled
       Bits currentHardLiveDocs, // the current hard deletes
-      MergeState.DocMap segDocMap,
-      MergeState.DocMap segLeafDocMap)
+      MergeState.DocMap segDocMap)
       throws IOException {

     assert mergeLiveDocs == null || mergeLiveDocs.length() == maxDoc;
@@ -4293,7 +4290,7 @@ public class IndexWriter
           assert currentHardLiveDocs.get(j) == false;
         } else if (carryOverDelete.test(j)) {
           // the document was deleted while we were merging:
-          mergedReadersAndUpdates.delete(segDocMap.get(segLeafDocMap.get(j)));
+          mergedReadersAndUpdates.delete(segDocMap.get(j));
         }
       }
     }
@@ -4303,7 +4300,7 @@ public class IndexWriter
       // does:
       for (int j = 0; j < maxDoc; j++) {
         if (carryOverDelete.test(j)) {
-          mergedReadersAndUpdates.delete(segDocMap.get(segLeafDocMap.get(j)));
+          mergedReadersAndUpdates.delete(segDocMap.get(j));
         }
       }
     }

lucene/core/src/java/org/apache/lucene/index/MergeState.java

@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 import static org.apache.lucene.index.IndexWriter.isCongruentSort;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 import org.apache.lucene.codecs.DocValuesProducer;
@@ -45,10 +44,6 @@ public class MergeState {
   /** Maps document IDs from old segments to document IDs in the new segment */
   public final DocMap[] docMaps;

-  // Only used by IW when it must remap deletes that arrived against the merging segments while a
-  // merge was running:
-  final DocMap[] leafDocMaps;
-
   /** {@link SegmentInfo} of the newly merged segment. */
   public final SegmentInfo segmentInfo;

@@ -92,15 +87,11 @@ public class MergeState {
   public boolean needsIndexSort;

   /** Sole constructor. */
-  MergeState(List<CodecReader> originalReaders, SegmentInfo segmentInfo, InfoStream infoStream)
+  MergeState(List<CodecReader> readers, SegmentInfo segmentInfo, InfoStream infoStream)
       throws IOException {
-
+    verifyIndexSort(readers, segmentInfo);
     this.infoStream = infoStream;
-
-    final Sort indexSort = segmentInfo.getIndexSort();
-    int numReaders = originalReaders.size();
-    leafDocMaps = new DocMap[numReaders];
-    List<CodecReader> readers = maybeSortReaders(originalReaders, segmentInfo);
+    int numReaders = readers.size();

     maxDocs = new int[numReaders];
     fieldsProducers = new FieldsProducer[numReaders];
@@ -158,7 +149,7 @@ public class MergeState {
     segmentInfo.setMaxDoc(numDocs);

     this.segmentInfo = segmentInfo;
-    this.docMaps = buildDocMaps(readers, indexSort);
+    this.docMaps = buildDocMaps(readers, segmentInfo.getIndexSort());
   }

   // Remap docIDs around deletions
@@ -226,28 +217,12 @@ public class MergeState {
     }
   }

-  private List<CodecReader> maybeSortReaders(
-      List<CodecReader> originalReaders, SegmentInfo segmentInfo) throws IOException {
-
-    // Default to identity:
-    for (int i = 0; i < originalReaders.size(); i++) {
-      leafDocMaps[i] =
-          new DocMap() {
-            @Override
-            public int get(int docID) {
-              return docID;
-            }
-          };
-    }
-
+  private static void verifyIndexSort(List<CodecReader> readers, SegmentInfo segmentInfo) {
     Sort indexSort = segmentInfo.getIndexSort();
     if (indexSort == null) {
-      return originalReaders;
+      return;
     }
-
-    List<CodecReader> readers = new ArrayList<>(originalReaders.size());
-
-    for (CodecReader leaf : originalReaders) {
+    for (CodecReader leaf : readers) {
       Sort segmentSort = leaf.getMetaData().getSort();
       if (segmentSort == null || isCongruentSort(indexSort, segmentSort) == false) {
         throw new IllegalArgumentException(
@@ -256,10 +231,7 @@ public class MergeState {
                 + " but to-be-merged segment has sort="
                 + (segmentSort == null ? "null" : segmentSort));
       }
-      readers.add(leaf);
     }
-
-    return readers;
   }

   /** A map of doc IDs. */