Cut over to unwrap segment reader (#33843)

The fix in #33757 introduced a workaround because FilterCodecReader didn't
support unwrapping. This cuts over to a more elegant fix that accesses the
reader's segment infos directly.
This commit is contained in:
Simon Willnauer 2018-09-19 10:18:03 +02:00 committed by GitHub
parent d22b383b9c
commit 251489d59a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 17 additions and 70 deletions

View File

@ -30,6 +30,7 @@ import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterCodecReader;
import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexCommit;
@ -726,6 +727,9 @@ public class Lucene {
} else if (reader instanceof FilterLeafReader) { } else if (reader instanceof FilterLeafReader) {
final FilterLeafReader fReader = (FilterLeafReader) reader; final FilterLeafReader fReader = (FilterLeafReader) reader;
return segmentReader(FilterLeafReader.unwrap(fReader)); return segmentReader(FilterLeafReader.unwrap(fReader));
} else if (reader instanceof FilterCodecReader) {
final FilterCodecReader fReader = (FilterCodecReader) reader;
return segmentReader(FilterCodecReader.unwrap(fReader));
} }
// hard fail - we can't get a SegmentReader // hard fail - we can't get a SegmentReader
throw new IllegalStateException("Can not extract segment reader from given index reader [" + reader + "]"); throw new IllegalStateException("Can not extract segment reader from given index reader [" + reader + "]");

View File

@ -81,31 +81,21 @@ public class SourceOnlySnapshot {
String segmentFileName; String segmentFileName;
try (Lock writeLock = targetDirectory.obtainLock(IndexWriter.WRITE_LOCK_NAME); try (Lock writeLock = targetDirectory.obtainLock(IndexWriter.WRITE_LOCK_NAME);
StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(commit)) { StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(commit)) {
SegmentInfos segmentInfos = reader.getSegmentInfos(); SegmentInfos segmentInfos = reader.getSegmentInfos().clone();
DirectoryReader wrappedReader = wrapReader(reader);
List<SegmentCommitInfo> newInfos = new ArrayList<>(); List<SegmentCommitInfo> newInfos = new ArrayList<>();
for (LeafReaderContext ctx : reader.leaves()) { for (LeafReaderContext ctx : wrappedReader.leaves()) {
LeafReader leafReader = ctx.reader(); LeafReader leafReader = ctx.reader();
SegmentCommitInfo info = reader.getSegmentInfos().info(ctx.ord); SegmentCommitInfo info = Lucene.segmentReader(leafReader).getSegmentInfo();
assert info.info.equals(Lucene.segmentReader(ctx.reader()).getSegmentInfo().info); LiveDocs liveDocs = getLiveDocs(leafReader);
/* We could do this totally different without wrapping this dummy directory reader if FilterCodecReader would have a if (leafReader.numDocs() != 0) { // fully deleted segments don't need to be processed
* getDelegate method. This is fixed in LUCENE-8502 but we need to wait for it to come in 7.5.1 or 7.6. SegmentCommitInfo newInfo = syncSegment(info, liveDocs, leafReader.getFieldInfos(), existingSegments, createdFiles);
* The reason here is that the ctx.ord is not guaranteed to be equivalent to the SegmentCommitInfo ord in the SegmentInfo newInfos.add(newInfo);
* object since we might drop fully deleted segments. if that happens we are using the wrong reader for the SI and
* might almost certainly expose deleted documents.
*/
DirectoryReader wrappedReader = wrapReader(new DummyDirectoryReader(reader.directory(), leafReader));
if (wrappedReader.leaves().isEmpty() == false) {
leafReader = wrappedReader.leaves().get(0).reader();
LiveDocs liveDocs = getLiveDocs(leafReader);
if (leafReader.numDocs() != 0) { // fully deleted segments don't need to be processed
SegmentCommitInfo newInfo = syncSegment(info, liveDocs, leafReader.getFieldInfos(), existingSegments, createdFiles);
newInfos.add(newInfo);
}
} }
} }
segmentInfos.clear(); segmentInfos.clear();
segmentInfos.addAll(newInfos); segmentInfos.addAll(newInfos);
segmentInfos.setNextWriteGeneration(Math.max(segmentInfos.getGeneration(), generation)+1); segmentInfos.setNextWriteGeneration(Math.max(segmentInfos.getGeneration(), generation) + 1);
String pendingSegmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS, String pendingSegmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS,
"", segmentInfos.getGeneration()); "", segmentInfos.getGeneration());
try (IndexOutput segnOutput = targetDirectory.createOutput(pendingSegmentFileName, IOContext.DEFAULT)) { try (IndexOutput segnOutput = targetDirectory.createOutput(pendingSegmentFileName, IOContext.DEFAULT)) {
@ -207,9 +197,9 @@ public class SourceOnlySnapshot {
newInfo = new SegmentCommitInfo(newSegmentInfo, 0, 0, -1, -1, -1); newInfo = new SegmentCommitInfo(newSegmentInfo, 0, 0, -1, -1, -1);
List<FieldInfo> fieldInfoCopy = new ArrayList<>(fieldInfos.size()); List<FieldInfo> fieldInfoCopy = new ArrayList<>(fieldInfos.size());
for (FieldInfo fieldInfo : fieldInfos) { for (FieldInfo fieldInfo : fieldInfos) {
fieldInfoCopy.add(new FieldInfo(fieldInfo.name, fieldInfo.number, fieldInfoCopy.add(new FieldInfo(fieldInfo.name, fieldInfo.number,
false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, fieldInfo.attributes(), 0, 0, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, fieldInfo.attributes(), 0, 0,
fieldInfo.isSoftDeletesField())); fieldInfo.isSoftDeletesField()));
} }
FieldInfos newFieldInfos = new FieldInfos(fieldInfoCopy.toArray(new FieldInfo[0])); FieldInfos newFieldInfos = new FieldInfos(fieldInfoCopy.toArray(new FieldInfo[0]));
codec.fieldInfosFormat().write(trackingDir, newSegmentInfo, segmentSuffix, newFieldInfos, IOContext.DEFAULT); codec.fieldInfosFormat().write(trackingDir, newSegmentInfo, segmentSuffix, newFieldInfos, IOContext.DEFAULT);
@ -250,7 +240,7 @@ public class SourceOnlySnapshot {
private boolean assertLiveDocs(Bits liveDocs, int deletes) { private boolean assertLiveDocs(Bits liveDocs, int deletes) {
int actualDeletes = 0; int actualDeletes = 0;
for (int i = 0; i < liveDocs.length(); i++ ) { for (int i = 0; i < liveDocs.length(); i++) {
if (liveDocs.get(i) == false) { if (liveDocs.get(i) == false) {
actualDeletes++; actualDeletes++;
} }
@ -268,51 +258,4 @@ public class SourceOnlySnapshot {
this.bits = bits; this.bits = bits;
} }
} }
private static class DummyDirectoryReader extends DirectoryReader {
protected DummyDirectoryReader(Directory directory, LeafReader... segmentReaders) throws IOException {
super(directory, segmentReaders);
}
@Override
protected DirectoryReader doOpenIfChanged() throws IOException {
return null;
}
@Override
protected DirectoryReader doOpenIfChanged(IndexCommit commit) throws IOException {
return null;
}
@Override
protected DirectoryReader doOpenIfChanged(IndexWriter writer, boolean applyAllDeletes) throws IOException {
return null;
}
@Override
public long getVersion() {
return 0;
}
@Override
public boolean isCurrent() throws IOException {
return false;
}
@Override
public IndexCommit getIndexCommit() throws IOException {
return null;
}
@Override
protected void doClose() throws IOException {
}
@Override
public CacheHelper getReaderCacheHelper() {
return null;
}
}
} }