mirror of https://github.com/apache/lucene.git
LUCENE-6552: add OneMerge.getMergeInfo
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1684991 13f79535-47bb-0310-9956-ffa450edef68
parent 864e14c6da
commit c68b04d90c
@@ -94,6 +94,9 @@ API Changes
 * LUCENE-6551: Add missing ConcurrentMergeScheduler.getAutoIOThrottle
   getter (Simon Willnauer, Mike McCandless)
 
+* LUCENE-6552: Add MergePolicy.OneMerge.getMergeInfo and rename
+  setInfo to setMergeInfo (Simon Willnauer, Mike McCandless)
+
 Bug fixes
 
 * LUCENE-6500: ParallelCompositeReader did not always call
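For readers following the API change described in the entry above, here is a minimal sketch (not part of this commit) of how a OneMerge subclass interacts with the renamed hook: the override point is now setMergeInfo (formerly setInfo), and the merged segment can be read back via getMergeInfo(). The class name and the diagnostics key below are invented for illustration; only the setMergeInfo/getMergeInfo accessors come from this change.

    import java.util.List;

    import org.apache.lucene.index.MergePolicy.OneMerge;
    import org.apache.lucene.index.SegmentCommitInfo;

    // Hypothetical OneMerge subclass illustrating the renamed hook.
    class TaggingOneMerge extends OneMerge {

      TaggingOneMerge(List<SegmentCommitInfo> segments) {
        super(segments);
      }

      @Override
      public void setMergeInfo(SegmentCommitInfo info) {
        // Tag the merged segment's diagnostics before it is stored
        // (the key/value pair here is purely illustrative).
        info.info.getDiagnostics().put("example.tag", "tagged-by-example");
        super.setMergeInfo(info);
      }

      // getMergeInfo() returns null until IndexWriter has called
      // setMergeInfo() during merge initialization.
      boolean mergedSegmentKnown() {
        return getMergeInfo() != null;
      }
    }

This mirrors the pattern the patch itself applies in SortingMergePolicy and TestDemoParallelLeafReader below.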
@@ -3753,7 +3753,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
     details.put("mergeFactor", Integer.toString(merge.segments.size()));
     setDiagnostics(si, SOURCE_MERGE, details);
-    merge.setInfo(new SegmentCommitInfo(si, 0, -1L, -1L, -1L));
+    merge.setMergeInfo(new SegmentCommitInfo(si, 0, -1L, -1L, -1L));
 
     // System.out.println("[" + Thread.currentThread().getName() + "] IW._mergeInit: " + segString(merge.segments) + " into " + si);
 
@@ -3861,7 +3861,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
 
     List<SegmentCommitInfo> sourceSegments = merge.segments;
 
-    IOContext context = new IOContext(merge.getMergeInfo());
+    IOContext context = new IOContext(merge.getStoreMergeInfo());
 
     final TrackingDirectoryWrapper dirWrapper = new TrackingDirectoryWrapper(mergeDirectory);
 
@@ -160,13 +160,21 @@ public abstract class MergePolicy {
     }
 
     /**
-     * Expert: Sets the {@link SegmentCommitInfo} of this {@link OneMerge}.
+     * Expert: Sets the {@link SegmentCommitInfo} of the merged segment.
      * Allows sub-classes to e.g. set diagnostics properties.
      */
-    public void setInfo(SegmentCommitInfo info) {
+    public void setMergeInfo(SegmentCommitInfo info) {
       this.info = info;
     }
 
+    /**
+     * Returns the {@link SegmentCommitInfo} for the merged segment,
+     * or null if it hasn't been set yet.
+     */
+    public SegmentCommitInfo getMergeInfo() {
+      return info;
+    }
+
     /** Expert: If {@link #getMergeReaders()} reorders document IDs, this method
      * must be overridden to return a mapping from the <i>natural</i> doc ID
      * (the doc ID that would result from a natural merge) to the actual doc
@@ -239,7 +247,7 @@ public abstract class MergePolicy {
     }
 
     /** Return {@link MergeInfo} describing this merge. */
-    public MergeInfo getMergeInfo() {
+    public MergeInfo getStoreMergeInfo() {
       return new MergeInfo(totalMaxDoc, estimatedMergeBytes, isExternal, maxNumSegments);
     }
   }
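To make the split between the two accessors concrete, a small sketch (not from the patch) that reads both: after this change, getMergeInfo() returns the merged segment's SegmentCommitInfo, while the store-level MergeInfo used to build an IOContext moves to getStoreMergeInfo(). The helper class and method names are invented; the estimatedMergeBytes access assumes the public field on org.apache.lucene.store.MergeInfo.

    import org.apache.lucene.index.MergePolicy.OneMerge;
    import org.apache.lucene.index.SegmentCommitInfo;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.MergeInfo;

    // Illustrative helper (not part of Lucene): shows which accessor returns
    // what, now that the two no longer collide on the name getMergeInfo.
    final class OneMergeAccessorsDemo {

      static void describe(OneMerge merge) {
        // Segment-level view: null until IndexWriter calls setMergeInfo(...).
        SegmentCommitInfo mergedSegment = merge.getMergeInfo();

        // Store-level view: what IndexWriter now passes to new IOContext(...).
        MergeInfo storeInfo = merge.getStoreMergeInfo();
        IOContext context = new IOContext(storeInfo);

        System.out.println("merged segment set: " + (mergedSegment != null)
            + ", estimated merge bytes: " + storeInfo.estimatedMergeBytes);
      }
    }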
@@ -561,10 +561,10 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
       }
 
       @Override
-      public void setInfo(SegmentCommitInfo info) {
+      public void setMergeInfo(SegmentCommitInfo info) {
         // Record that this merged segment is current as of this schemaGen:
         info.info.getDiagnostics().put(SCHEMA_GEN_KEY, Long.toString(schemaGen));
-        super.setInfo(info);
+        super.setMergeInfo(info);
       }
 
       @Override
@@ -319,10 +319,14 @@ public class TestIndexWriterMerging extends LuceneTestCase
         if (merge == null) {
           break;
         }
+        int numDocs = 0;
         for(int i=0;i<merge.segments.size();i++) {
-          assert merge.segments.get(i).info.maxDoc() < 20;
+          int maxDoc = merge.segments.get(i).info.maxDoc();
+          numDocs += maxDoc;
+          assertTrue(maxDoc < 20);
         }
         writer.merge(merge);
+        assertEquals(numDocs, merge.getMergeInfo().info.maxDoc());
       }
     }
 
@@ -122,10 +122,10 @@ public final class SortingMergePolicy extends MergePolicy {
     }
 
     @Override
-    public void setInfo(SegmentCommitInfo info) {
+    public void setMergeInfo(SegmentCommitInfo info) {
       Map<String,String> diagnostics = info.info.getDiagnostics();
       diagnostics.put(SORTER_ID_PROP, sorter.getID());
-      super.setInfo(info);
+      super.setMergeInfo(info);
     }
 
     private PackedLongValues getDeletes(List<CodecReader> readers) {