diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 2825f91b2d6..d702615b19c 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -138,6 +138,10 @@ Changes in Backwards Compatibility Policy
API Changes
+* LUCENE-5711: MergePolicy now accepts an IndexWriter instance on each
+  method call rather than holding a reference to a single IndexWriter
+  instance. (Simon Willnauer)
+
* LUCENE-5582: Deprecate IndexOutput.length (just use
IndexOutput.getFilePointer instead) and IndexOutput.setLength.
(Mike McCandless)
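
For context, a minimal sketch of what a custom policy looks like against the revised
API introduced by this patch. The class name PassThroughMergePolicy and its
"never merge" behaviour are hypothetical, used only to show the new signatures.

    import java.io.IOException;
    import java.util.Map;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.index.MergeTrigger;
    import org.apache.lucene.index.SegmentCommitInfo;
    import org.apache.lucene.index.SegmentInfos;

    // Hypothetical example, not part of this patch: every hook now receives
    // the IndexWriter it should inspect instead of relying on setIndexWriter().
    public final class PassThroughMergePolicy extends MergePolicy {

      @Override
      public MergeSpecification findMerges(MergeTrigger mergeTrigger,
          SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
        return null; // no merges proposed
      }

      @Override
      public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
          int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge,
          IndexWriter writer) throws IOException {
        return null;
      }

      @Override
      public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos,
          IndexWriter writer) throws IOException {
        return null;
      }

      @Override
      public void close() {}
    }
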
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index 8a1690559fd..3c69d56d9f7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -705,7 +705,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
analyzer = config.getAnalyzer();
infoStream = config.getInfoStream();
mergePolicy = config.getMergePolicy();
- mergePolicy.setIndexWriter(this);
mergeScheduler = config.getMergeScheduler();
codec = config.getCodec();
@@ -1781,7 +1780,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
MergePolicy.MergeSpecification spec;
boolean newMergesFound = false;
synchronized(this) {
- spec = mergePolicy.findForcedDeletesMerges(segmentInfos);
+ spec = mergePolicy.findForcedDeletesMerges(segmentInfos, this);
newMergesFound = spec != null;
if (newMergesFound) {
final int numMerges = spec.merges.size();
@@ -1901,7 +1900,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
if (maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) {
assert trigger == MergeTrigger.EXPLICIT || trigger == MergeTrigger.MERGE_FINISHED :
"Expected EXPLICT or MERGE_FINISHED as trigger even with maxNumSegments set but was: " + trigger.name();
- spec = mergePolicy.findForcedMerges(segmentInfos, maxNumSegments, Collections.unmodifiableMap(segmentsToMerge));
+ spec = mergePolicy.findForcedMerges(segmentInfos, maxNumSegments, Collections.unmodifiableMap(segmentsToMerge), this);
newMergesFound = spec != null;
if (newMergesFound) {
final int numMerges = spec.merges.size();
@@ -1911,7 +1910,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
}
}
} else {
- spec = mergePolicy.findMerges(trigger, segmentInfos);
+ spec = mergePolicy.findMerges(trigger, segmentInfos, this);
}
newMergesFound = spec != null;
if (newMergesFound) {
@@ -2598,7 +2597,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
return;
}
ensureOpen();
- useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, infoPerCommit);
+ useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, infoPerCommit, this);
}
// Now create the compound file if needed
@@ -4019,7 +4018,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
//System.out.println("merger set hasProx=" + merger.hasProx() + " seg=" + merge.info.name);
boolean useCompoundFile;
synchronized (this) { // Guard segmentInfos
- useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, merge.info);
+ useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, merge.info, this);
}
if (useCompoundFile) {
diff --git a/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java
index a36b61a1170..1d077f42752 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LogByteSizeMergePolicy.java
@@ -43,8 +43,8 @@ public class LogByteSizeMergePolicy extends LogMergePolicy {
}
@Override
- protected long size(SegmentCommitInfo info) throws IOException {
- return sizeBytes(info);
+ protected long size(SegmentCommitInfo info, IndexWriter writer) throws IOException {
+ return sizeBytes(info, writer);
}
/**
Determines the largest segment (measured by total
diff --git a/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java
index 986e3a2a90a..1b9a061f0f3 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LogDocMergePolicy.java
@@ -40,8 +40,8 @@ public class LogDocMergePolicy extends LogMergePolicy {
}
@Override
- protected long size(SegmentCommitInfo info) throws IOException {
- return sizeDocs(info);
+ protected long size(SegmentCommitInfo info, IndexWriter writer) throws IOException {
+ return sizeDocs(info, writer);
}
/** Sets the minimum size for the lowest level segments.
diff --git a/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java
index c931f801411..18949b4e80d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java
@@ -99,16 +99,15 @@ public abstract class LogMergePolicy extends MergePolicy {
/** Returns true if {@code LMP} is enabled in {@link
* IndexWriter}'s {@code infoStream}. */
- protected boolean verbose() {
- final IndexWriter w = writer.get();
- return w != null && w.infoStream.isEnabled("LMP");
+ protected boolean verbose(IndexWriter writer) {
+ return writer != null && writer.infoStream.isEnabled("LMP");
}
/** Print a debug message to {@link IndexWriter}'s {@code
* infoStream}. */
- protected void message(String message) {
- if (verbose()) {
- writer.get().infoStream.message("LMP", message);
+ protected void message(String message, IndexWriter writer) {
+ if (verbose(writer)) {
+ writer.infoStream.message("LMP", message);
}
}
@@ -154,9 +153,9 @@ public abstract class LogMergePolicy extends MergePolicy {
* SegmentCommitInfo}, pro-rated by percentage of
* non-deleted documents if {@link
* #setCalibrateSizeByDeletes} is set. */
- protected long sizeDocs(SegmentCommitInfo info) throws IOException {
+ protected long sizeDocs(SegmentCommitInfo info, IndexWriter writer) throws IOException {
if (calibrateSizeByDeletes) {
- int delCount = writer.get().numDeletedDocs(info);
+ int delCount = writer.numDeletedDocs(info);
assert delCount <= info.info.getDocCount();
return (info.info.getDocCount() - (long)delCount);
} else {
@@ -168,9 +167,9 @@ public abstract class LogMergePolicy extends MergePolicy {
* SegmentCommitInfo}, pro-rated by percentage of
* non-deleted documents if {@link
* #setCalibrateSizeByDeletes} is set. */
- protected long sizeBytes(SegmentCommitInfo info) throws IOException {
+ protected long sizeBytes(SegmentCommitInfo info, IndexWriter writer) throws IOException {
if (calibrateSizeByDeletes) {
- return super.size(info);
+ return super.size(info, writer);
}
return info.sizeInBytes();
}
@@ -178,7 +177,7 @@ public abstract class LogMergePolicy extends MergePolicy {
/** Returns true if the number of segments eligible for
* merging is less than or equal to the specified {@code
* maxNumSegments}. */
-  protected boolean isMerged(SegmentInfos infos, int maxNumSegments, Map<SegmentCommitInfo,Boolean> segmentsToMerge) throws IOException {
+  protected boolean isMerged(SegmentInfos infos, int maxNumSegments, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer) throws IOException {
final int numSegments = infos.size();
int numToMerge = 0;
SegmentCommitInfo mergeInfo = null;
@@ -194,7 +193,7 @@ public abstract class LogMergePolicy extends MergePolicy {
}
return numToMerge <= maxNumSegments &&
- (numToMerge != 1 || !segmentIsOriginal || isMerged(infos, mergeInfo));
+ (numToMerge != 1 || !segmentIsOriginal || isMerged(infos, mergeInfo, writer));
}
/**
@@ -206,20 +205,20 @@ public abstract class LogMergePolicy extends MergePolicy {
* maxNumSegments} will remain, but <= that number.
*/
private MergeSpecification findForcedMergesSizeLimit(
- SegmentInfos infos, int maxNumSegments, int last) throws IOException {
+ SegmentInfos infos, int maxNumSegments, int last, IndexWriter writer) throws IOException {
MergeSpecification spec = new MergeSpecification();
     final List<SegmentCommitInfo> segments = infos.asList();
int start = last - 1;
while (start >= 0) {
SegmentCommitInfo info = infos.info(start);
- if (size(info) > maxMergeSizeForForcedMerge || sizeDocs(info) > maxMergeDocs) {
- if (verbose()) {
- message("findForcedMergesSizeLimit: skip segment=" + info + ": size is > maxMergeSize (" + maxMergeSizeForForcedMerge + ") or sizeDocs is > maxMergeDocs (" + maxMergeDocs + ")");
+ if (size(info, writer) > maxMergeSizeForForcedMerge || sizeDocs(info, writer) > maxMergeDocs) {
+ if (verbose(writer)) {
+ message("findForcedMergesSizeLimit: skip segment=" + info + ": size is > maxMergeSize (" + maxMergeSizeForForcedMerge + ") or sizeDocs is > maxMergeDocs (" + maxMergeDocs + ")", writer);
}
// need to skip that segment + add a merge for the 'right' segments,
// unless there is only 1 which is merged.
- if (last - start - 1 > 1 || (start != last - 1 && !isMerged(infos, infos.info(start + 1)))) {
+ if (last - start - 1 > 1 || (start != last - 1 && !isMerged(infos, infos.info(start + 1), writer))) {
// there is more than 1 segment to the right of
// this one, or a mergeable single segment.
spec.add(new OneMerge(segments.subList(start + 1, last)));
@@ -235,7 +234,7 @@ public abstract class LogMergePolicy extends MergePolicy {
// Add any left-over segments, unless there is just 1
// already fully merged
- if (last > 0 && (++start + 1 < last || !isMerged(infos, infos.info(start)))) {
+ if (last > 0 && (++start + 1 < last || !isMerged(infos, infos.info(start), writer))) {
spec.add(new OneMerge(segments.subList(start, last)));
}
@@ -247,7 +246,7 @@ public abstract class LogMergePolicy extends MergePolicy {
* the returned merges only by the {@code maxNumSegments} parameter, and
* guaranteed that exactly that number of segments will remain in the index.
*/
- private MergeSpecification findForcedMergesMaxNumSegments(SegmentInfos infos, int maxNumSegments, int last) throws IOException {
+ private MergeSpecification findForcedMergesMaxNumSegments(SegmentInfos infos, int maxNumSegments, int last, IndexWriter writer) throws IOException {
MergeSpecification spec = new MergeSpecification();
     final List<SegmentCommitInfo> segments = infos.asList();
@@ -265,7 +264,7 @@ public abstract class LogMergePolicy extends MergePolicy {
// Since we must merge down to 1 segment, the
// choice is simple:
- if (last > 1 || !isMerged(infos, infos.info(0))) {
+ if (last > 1 || !isMerged(infos, infos.info(0), writer)) {
spec.add(new OneMerge(segments.subList(0, last)));
}
} else if (last > maxNumSegments) {
@@ -288,9 +287,9 @@ public abstract class LogMergePolicy extends MergePolicy {
   @Override
   public MergeSpecification findForcedMerges(SegmentInfos infos,
-      int maxNumSegments, Map<SegmentCommitInfo,Boolean> segmentsToMerge) throws IOException {
+      int maxNumSegments, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer) throws IOException {
assert maxNumSegments > 0;
- if (verbose()) {
- message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge="+ segmentsToMerge);
+ if (verbose(writer)) {
+ message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge="+ segmentsToMerge, writer);
}
// If the segments are already merged (e.g. there's only 1 segment), or
     // there are <maxNumSegments:.
-      if (size(info) > maxMergeSizeForForcedMerge || sizeDocs(info) > maxMergeDocs) {
+ if (size(info, writer) > maxMergeSizeForForcedMerge || sizeDocs(info, writer) > maxMergeDocs) {
anyTooLarge = true;
break;
}
}
if (anyTooLarge) {
- return findForcedMergesSizeLimit(infos, maxNumSegments, last);
+ return findForcedMergesSizeLimit(infos, maxNumSegments, last, writer);
} else {
- return findForcedMergesMaxNumSegments(infos, maxNumSegments, last);
+ return findForcedMergesMaxNumSegments(infos, maxNumSegments, last, writer);
}
}
@@ -380,33 +379,32 @@ public abstract class LogMergePolicy extends MergePolicy {
* deletes, up to mergeFactor at a time.
*/
@Override
- public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos)
+ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, IndexWriter writer)
throws IOException {
     final List<SegmentCommitInfo> segments = segmentInfos.asList();
final int numSegments = segments.size();
- if (verbose()) {
- message("findForcedDeleteMerges: " + numSegments + " segments");
+ if (verbose(writer)) {
+ message("findForcedDeleteMerges: " + numSegments + " segments", writer);
}
MergeSpecification spec = new MergeSpecification();
int firstSegmentWithDeletions = -1;
- IndexWriter w = writer.get();
- assert w != null;
+ assert writer != null;
     for(int i=0;i<numSegments;i++) {
       final SegmentCommitInfo info = segmentInfos.info(i);
-      int delCount = w.numDeletedDocs(info);
+      int delCount = writer.numDeletedDocs(info);
       if (delCount > 0) {
- if (verbose()) {
- message(" segment " + info.info.name + " has deletions");
+ if (verbose(writer)) {
+ message(" segment " + info.info.name + " has deletions", writer);
}
if (firstSegmentWithDeletions == -1)
firstSegmentWithDeletions = i;
else if (i - firstSegmentWithDeletions == mergeFactor) {
// We've seen mergeFactor segments in a row with
// deletions, so force a merge now:
- if (verbose()) {
- message(" add merge " + firstSegmentWithDeletions + " to " + (i-1) + " inclusive");
+ if (verbose(writer)) {
+ message(" add merge " + firstSegmentWithDeletions + " to " + (i-1) + " inclusive", writer);
}
spec.add(new OneMerge(segments.subList(firstSegmentWithDeletions, i)));
firstSegmentWithDeletions = i;
@@ -415,8 +413,8 @@ public abstract class LogMergePolicy extends MergePolicy {
// End of a sequence of segments with deletions, so,
// merge those past segments even if it's fewer than
// mergeFactor segments
- if (verbose()) {
- message(" add merge " + firstSegmentWithDeletions + " to " + (i-1) + " inclusive");
+ if (verbose(writer)) {
+ message(" add merge " + firstSegmentWithDeletions + " to " + (i-1) + " inclusive", writer);
}
spec.add(new OneMerge(segments.subList(firstSegmentWithDeletions, i)));
firstSegmentWithDeletions = -1;
@@ -424,8 +422,8 @@ public abstract class LogMergePolicy extends MergePolicy {
}
if (firstSegmentWithDeletions != -1) {
- if (verbose()) {
- message(" add merge " + firstSegmentWithDeletions + " to " + (numSegments-1) + " inclusive");
+ if (verbose(writer)) {
+ message(" add merge " + firstSegmentWithDeletions + " to " + (numSegments-1) + " inclusive", writer);
}
spec.add(new OneMerge(segments.subList(firstSegmentWithDeletions, numSegments)));
}
@@ -459,11 +457,11 @@ public abstract class LogMergePolicy extends MergePolicy {
* will return multiple merges, allowing the {@link
* MergeScheduler} to use concurrency. */
@Override
- public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos infos) throws IOException {
+ public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos infos, IndexWriter writer) throws IOException {
final int numSegments = infos.size();
- if (verbose()) {
- message("findMerges: " + numSegments + " segments");
+ if (verbose(writer)) {
+ message("findMerges: " + numSegments + " segments", writer);
}
// Compute levels, which is just log (base mergeFactor)
@@ -471,11 +469,11 @@ public abstract class LogMergePolicy extends MergePolicy {
     final List<SegmentInfoAndLevel> levels = new ArrayList<>();
final float norm = (float) Math.log(mergeFactor);
-    final Collection<SegmentCommitInfo> mergingSegments = writer.get().getMergingSegments();
+    final Collection<SegmentCommitInfo> mergingSegments = writer.getMergingSegments();
-      if (verbose()) {
-        final long segBytes = sizeBytes(info);
+      if (verbose(writer)) {
+        final long segBytes = sizeBytes(info, writer);
         String extra = mergingSegments.contains(info) ? " [merging]" : "";
         if (size >= maxMergeSize) {
extra += " [skip: too large]";
}
- message("seg=" + writer.get().segString(info) + " level=" + infoLevel.level + " size=" + String.format(Locale.ROOT, "%.3f MB", segBytes/1024/1024.) + extra);
+ message("seg=" + writer.segString(info) + " level=" + infoLevel.level + " size=" + String.format(Locale.ROOT, "%.3f MB", segBytes/1024/1024.) + extra, writer);
}
}
@@ -547,8 +545,8 @@ public abstract class LogMergePolicy extends MergePolicy {
}
upto--;
}
- if (verbose()) {
- message(" level " + levelBottom + " to " + maxLevel + ": " + (1+upto-start) + " segments");
+ if (verbose(writer)) {
+ message(" level " + levelBottom + " to " + maxLevel + ": " + (1+upto-start) + " segments", writer);
}
// Finally, record all merges that are viable at this level:
@@ -558,7 +556,7 @@ public abstract class LogMergePolicy extends MergePolicy {
boolean anyMerging = false;
         for(int i=start;i<end;i++) {
           final SegmentCommitInfo info = levels.get(i).info;
-          anyTooLarge |= (size(info) >= maxMergeSize || sizeDocs(info) >= maxMergeDocs);
+ anyTooLarge |= (size(info, writer) >= maxMergeSize || sizeDocs(info, writer) >= maxMergeDocs);
if (mergingSegments.contains(info)) {
anyMerging = true;
break;
@@ -575,12 +573,12 @@ public abstract class LogMergePolicy extends MergePolicy {
mergeInfos.add(levels.get(i).info);
assert infos.contains(levels.get(i).info);
}
- if (verbose()) {
- message(" add merge=" + writer.get().segString(mergeInfos) + " start=" + start + " end=" + end);
+ if (verbose(writer)) {
+ message(" add merge=" + writer.segString(mergeInfos) + " start=" + start + " end=" + end, writer);
}
spec.add(new OneMerge(mergeInfos));
- } else if (verbose()) {
- message(" " + start + " to " + end + ": contains segment over maxMergeSize or maxMergeDocs; skipping");
+ } else if (verbose(writer)) {
+ message(" " + start + " to " + end + ": contains segment over maxMergeSize or maxMergeDocs; skipping", writer);
}
start = end;
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
index 632f45c803b..efc343eec50 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
@@ -43,7 +43,7 @@ import java.util.Map;
* {@link MergeSpecification} instance describing the set of
* merges that should be done, or null if no merges are
* necessary. When IndexWriter.forceMerge is called, it calls
- * {@link #findForcedMerges(SegmentInfos,int,Map)} and the MergePolicy should
+ * {@link #findForcedMerges(SegmentInfos,int,Map,IndexWriter)} and the MergePolicy should
* then return the necessary merges.
*
* Note that the policy can return more than one merge at
@@ -372,9 +372,6 @@ public abstract class MergePolicy implements java.io.Closeable {
*/
protected static final long DEFAULT_MAX_CFS_SEGMENT_SIZE = Long.MAX_VALUE;
- /** {@link IndexWriter} that contains this instance. */
-  protected SetOnce<IndexWriter> writer;
-
/** If the size of the merge segment exceeds this ratio of
* the total index size then it will remain in
* non-compound format */
@@ -385,9 +382,7 @@ public abstract class MergePolicy implements java.io.Closeable {
protected long maxCFSSegmentSize = DEFAULT_MAX_CFS_SEGMENT_SIZE;
/**
- * Creates a new merge policy instance. Note that if you intend to use it
- * without passing it to {@link IndexWriter}, you should call
- * {@link #setIndexWriter(IndexWriter)}.
+ * Creates a new merge policy instance.
*/
public MergePolicy() {
this(DEFAULT_NO_CFS_RATIO, DEFAULT_MAX_CFS_SEGMENT_SIZE);
@@ -399,22 +394,10 @@ public abstract class MergePolicy implements java.io.Closeable {
* defaults than the {@link MergePolicy}
*/
protected MergePolicy(double defaultNoCFSRatio, long defaultMaxCFSSegmentSize) {
- writer = new SetOnce<>();
this.noCFSRatio = defaultNoCFSRatio;
this.maxCFSSegmentSize = defaultMaxCFSSegmentSize;
}
- /**
- * Sets the {@link IndexWriter} to use by this merge policy. This method is
- * allowed to be called only once, and is usually set by IndexWriter. If it is
- * called more than once, {@link AlreadySetException} is thrown.
- *
- * @see SetOnce
- */
- public void setIndexWriter(IndexWriter writer) {
- this.writer.set(writer);
- }
-
/**
* Determine what set of merge operations are now necessary on the index.
* {@link IndexWriter} calls this whenever there is a change to the segments.
@@ -423,8 +406,9 @@ public abstract class MergePolicy implements java.io.Closeable {
* @param mergeTrigger the event that triggered the merge
* @param segmentInfos
* the total set of segments in the index
+   * @param writer the IndexWriter to use for finding the merges
*/
- public abstract MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos)
+ public abstract MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, IndexWriter writer)
throws IOException;
/**
@@ -447,9 +431,10 @@ public abstract class MergePolicy implements java.io.Closeable {
* an original segment present in the
* to-be-merged index; else, it was a segment
* produced by a cascaded merge.
+   * @param writer the IndexWriter to use for finding the merges
*/
public abstract MergeSpecification findForcedMerges(
-      SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge)
+      SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer)
throws IOException;
/**
@@ -458,9 +443,10 @@ public abstract class MergePolicy implements java.io.Closeable {
*
* @param segmentInfos
* the total set of segments in the index
+   * @param writer the IndexWriter to use for finding the merges
*/
public abstract MergeSpecification findForcedDeletesMerges(
- SegmentInfos segmentInfos) throws IOException;
+ SegmentInfos segmentInfos, IndexWriter writer) throws IOException;
/**
* Release all resources for the policy.
@@ -475,11 +461,11 @@ public abstract class MergePolicy implements java.io.Closeable {
* {@link #getMaxCFSSegmentSizeMB()} and the size is less or equal to the
   * TotalIndexSize * {@link #getNoCFSRatio()} otherwise false.
*/
- public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo) throws IOException {
+ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, IndexWriter writer) throws IOException {
if (getNoCFSRatio() == 0.0) {
return false;
}
- long mergedInfoSize = size(mergedInfo);
+ long mergedInfoSize = size(mergedInfo, writer);
if (mergedInfoSize > maxCFSSegmentSize) {
return false;
}
@@ -488,7 +474,7 @@ public abstract class MergePolicy implements java.io.Closeable {
}
long totalSize = 0;
for (SegmentCommitInfo info : infos) {
- totalSize += size(info);
+ totalSize += size(info, writer);
}
return mergedInfoSize <= getNoCFSRatio() * totalSize;
}
@@ -496,9 +482,9 @@ public abstract class MergePolicy implements java.io.Closeable {
/** Return the byte size of the provided {@link
* SegmentCommitInfo}, pro-rated by percentage of
* non-deleted documents is set. */
- protected long size(SegmentCommitInfo info) throws IOException {
+ protected long size(SegmentCommitInfo info, IndexWriter writer) throws IOException {
long byteSize = info.sizeInBytes();
- int delCount = writer.get().numDeletedDocs(info);
+ int delCount = writer.numDeletedDocs(info);
double delRatio = (info.info.getDocCount() <= 0 ? 0.0f : ((float)delCount / (float)info.info.getDocCount()));
assert delRatio <= 1.0;
return (info.info.getDocCount() <= 0 ? byteSize : (long)(byteSize * (1.0 - delRatio)));
@@ -507,13 +493,12 @@ public abstract class MergePolicy implements java.io.Closeable {
/** Returns true if this single info is already fully merged (has no
* pending deletes, is in the same dir as the
* writer, and matches the current compound file setting */
- protected final boolean isMerged(SegmentInfos infos, SegmentCommitInfo info) throws IOException {
- IndexWriter w = writer.get();
- assert w != null;
- boolean hasDeletions = w.numDeletedDocs(info) > 0;
+ protected final boolean isMerged(SegmentInfos infos, SegmentCommitInfo info, IndexWriter writer) throws IOException {
+ assert writer != null;
+ boolean hasDeletions = writer.numDeletedDocs(info) > 0;
return !hasDeletions &&
- info.info.dir == w.getDirectory() &&
- useCompoundFile(infos, info) == info.info.getUseCompoundFile();
+ info.info.dir == writer.getDirectory() &&
+ useCompoundFile(infos, info, writer) == info.info.getUseCompoundFile();
}
/** Returns current {@code noCFSRatio}.
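
A quick caller-side illustration of the API change (a hypothetical helper, not code
from this patch): previously a policy was bound once via policy.setIndexWriter(writer)
and then queried with findMerges(trigger, infos); now the writer is an explicit
argument of every call, so a policy instance no longer carries per-writer state.

    import java.io.IOException;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.index.MergeTrigger;
    import org.apache.lucene.index.SegmentInfos;

    // Hypothetical helper; "policy", "infos" and "writer" are placeholders.
    final class MergePlanner {
      private MergePlanner() {}

      static MergePolicy.MergeSpecification proposeMerges(MergePolicy policy,
          SegmentInfos infos, IndexWriter writer) throws IOException {
        // Old API: policy.setIndexWriter(writer); policy.findMerges(trigger, infos);
        // New API: the writer travels with each call.
        return policy.findMerges(MergeTrigger.EXPLICIT, infos, writer);
      }
    }
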
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
index 8d169d0ddb0..f147e6ebb73 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
@@ -19,7 +19,7 @@ package org.apache.lucene.index;
/**
* MergeTrigger is passed to
- * {@link org.apache.lucene.index.MergePolicy#findMerges(MergeTrigger, org.apache.lucene.index.SegmentInfos)} to indicate the
+ * {@link org.apache.lucene.index.MergePolicy#findMerges(MergeTrigger, org.apache.lucene.index.SegmentInfos, IndexWriter)} to indicate the
* event that triggered the merge.
*/
public enum MergeTrigger {
diff --git a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java
index 15565afaa72..e503d35f231 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java
@@ -38,25 +38,22 @@ public final class NoMergePolicy extends MergePolicy {
public void close() {}
@Override
- public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos) { return null; }
+ public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, IndexWriter writer) { return null; }
@Override
public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
-      int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge) { return null; }
+      int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer) { return null; }
@Override
- public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos) { return null; }
+ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, IndexWriter writer) { return null; }
@Override
- public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment) {
+ public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment, IndexWriter writer) {
return newSegment.info.getUseCompoundFile();
}
@Override
- public void setIndexWriter(IndexWriter writer) {}
-
- @Override
- protected long size(SegmentCommitInfo info) throws IOException {
+ protected long size(SegmentCommitInfo info, IndexWriter writer) throws IOException {
return Long.MAX_VALUE;
}
diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
index 5f225c80d37..33c07be8554 100644
--- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
@@ -236,11 +236,17 @@ public class TieredMergePolicy extends MergePolicy {
}
   private class SegmentByteSizeDescending implements Comparator<SegmentCommitInfo> {
+
+ private final IndexWriter writer;
+
+ SegmentByteSizeDescending(IndexWriter writer) {
+ this.writer = writer;
+ }
@Override
public int compare(SegmentCommitInfo o1, SegmentCommitInfo o2) {
try {
- final long sz1 = size(o1);
- final long sz2 = size(o2);
+ final long sz1 = size(o1, writer);
+ final long sz2 = size(o2, writer);
if (sz1 > sz2) {
return -1;
} else if (sz2 > sz1) {
@@ -272,32 +278,32 @@ public class TieredMergePolicy extends MergePolicy {
}
@Override
- public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos infos) throws IOException {
- if (verbose()) {
- message("findMerges: " + infos.size() + " segments");
+ public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos infos, IndexWriter writer) throws IOException {
+ if (verbose(writer)) {
+ message("findMerges: " + infos.size() + " segments", writer);
}
if (infos.size() == 0) {
return null;
}
-    final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
+    final Collection<SegmentCommitInfo> merging = writer.getMergingSegments();
     final Collection<SegmentCommitInfo> toBeMerged = new HashSet<>();
     final List<SegmentCommitInfo> infosSorted = new ArrayList<>(infos.asList());
- Collections.sort(infosSorted, new SegmentByteSizeDescending());
+ Collections.sort(infosSorted, new SegmentByteSizeDescending(writer));
// Compute total index bytes & print details about the index
long totIndexBytes = 0;
long minSegmentBytes = Long.MAX_VALUE;
for(SegmentCommitInfo info : infosSorted) {
- final long segBytes = size(info);
- if (verbose()) {
+ final long segBytes = size(info, writer);
+ if (verbose(writer)) {
String extra = merging.contains(info) ? " [merging]" : "";
if (segBytes >= maxMergedSegmentBytes/2.0) {
extra += " [skip: too large]";
} else if (segBytes < floorSegmentBytes) {
extra += " [floored]";
}
- message(" seg=" + writer.get().segString(info) + " size=" + String.format(Locale.ROOT, "%.3f", segBytes/1024/1024.) + " MB" + extra);
+ message(" seg=" + writer.segString(info) + " size=" + String.format(Locale.ROOT, "%.3f", segBytes/1024/1024.) + " MB" + extra, writer);
}
minSegmentBytes = Math.min(segBytes, minSegmentBytes);
@@ -308,8 +314,8 @@ public class TieredMergePolicy extends MergePolicy {
// If we have too-large segments, grace them out
// of the maxSegmentCount:
int tooBigCount = 0;
- while (tooBigCount < infosSorted.size() && size(infosSorted.get(tooBigCount)) >= maxMergedSegmentBytes/2.0) {
- totIndexBytes -= size(infosSorted.get(tooBigCount));
+ while (tooBigCount < infosSorted.size() && size(infosSorted.get(tooBigCount), writer) >= maxMergedSegmentBytes/2.0) {
+ totIndexBytes -= size(infosSorted.get(tooBigCount), writer);
tooBigCount++;
}
@@ -353,8 +359,8 @@ public class TieredMergePolicy extends MergePolicy {
final boolean maxMergeIsRunning = mergingBytes >= maxMergedSegmentBytes;
- if (verbose()) {
- message(" allowedSegmentCount=" + allowedSegCountInt + " vs count=" + infosSorted.size() + " (eligible count=" + eligible.size() + ") tooBigCount=" + tooBigCount);
+ if (verbose(writer)) {
+ message(" allowedSegmentCount=" + allowedSegCountInt + " vs count=" + infosSorted.size() + " (eligible count=" + eligible.size() + ") tooBigCount=" + tooBigCount, writer);
}
if (eligible.size() == 0) {
@@ -378,7 +384,7 @@ public class TieredMergePolicy extends MergePolicy {
boolean hitTooLarge = false;
         for(int idx = startIdx;idx<infosSorted.size() && candidate.size() < maxMergeAtOnce;idx++) {
           final SegmentCommitInfo info = infosSorted.get(idx);
-          final long segBytes = size(info);
+          final long segBytes = size(info, writer);

           if (totAfterMergeBytes + segBytes > maxMergedSegmentBytes) {
hitTooLarge = true;
@@ -394,9 +400,9 @@ public class TieredMergePolicy extends MergePolicy {
totAfterMergeBytes += segBytes;
}
- final MergeScore score = score(candidate, hitTooLarge, mergingBytes);
- if (verbose()) {
- message(" maybe=" + writer.get().segString(candidate) + " score=" + score.getScore() + " " + score.getExplanation() + " tooLarge=" + hitTooLarge + " size=" + String.format(Locale.ROOT, "%.3f MB", totAfterMergeBytes/1024./1024.));
+ final MergeScore score = score(candidate, hitTooLarge, mergingBytes, writer);
+ if (verbose(writer)) {
+ message(" maybe=" + writer.segString(candidate) + " score=" + score.getScore() + " " + score.getExplanation() + " tooLarge=" + hitTooLarge + " size=" + String.format(Locale.ROOT, "%.3f MB", totAfterMergeBytes/1024./1024.), writer);
}
// If we are already running a max sized merge
@@ -420,8 +426,8 @@ public class TieredMergePolicy extends MergePolicy {
toBeMerged.add(info);
}
- if (verbose()) {
- message(" add merge=" + writer.get().segString(merge.segments) + " size=" + String.format(Locale.ROOT, "%.3f MB", bestMergeBytes/1024./1024.) + " score=" + String.format(Locale.ROOT, "%.3f", bestScore.getScore()) + " " + bestScore.getExplanation() + (bestTooLarge ? " [max merge]" : ""));
+ if (verbose(writer)) {
+ message(" add merge=" + writer.segString(merge.segments) + " size=" + String.format(Locale.ROOT, "%.3f MB", bestMergeBytes/1024./1024.) + " score=" + String.format(Locale.ROOT, "%.3f", bestScore.getScore()) + " " + bestScore.getExplanation() + (bestTooLarge ? " [max merge]" : ""), writer);
}
} else {
return spec;
@@ -433,12 +439,12 @@ public class TieredMergePolicy extends MergePolicy {
}
/** Expert: scores one merge; subclasses can override. */
-  protected MergeScore score(List<SegmentCommitInfo> candidate, boolean hitTooLarge, long mergingBytes) throws IOException {
+  protected MergeScore score(List<SegmentCommitInfo> candidate, boolean hitTooLarge, long mergingBytes, IndexWriter writer) throws IOException {
long totBeforeMergeBytes = 0;
long totAfterMergeBytes = 0;
long totAfterMergeBytesFloored = 0;
for(SegmentCommitInfo info : candidate) {
- final long segBytes = size(info);
+ final long segBytes = size(info, writer);
totAfterMergeBytes += segBytes;
totAfterMergeBytesFloored += floorSize(segBytes);
totBeforeMergeBytes += info.sizeInBytes();
@@ -458,7 +464,7 @@ public class TieredMergePolicy extends MergePolicy {
// over time:
skew = 1.0/maxMergeAtOnce;
} else {
- skew = ((double) floorSize(size(candidate.get(0))))/totAfterMergeBytesFloored;
+ skew = ((double) floorSize(size(candidate.get(0), writer)))/totAfterMergeBytesFloored;
}
// Strongly favor merges with less skew (smaller
@@ -492,14 +498,14 @@ public class TieredMergePolicy extends MergePolicy {
}
@Override
-  public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge) throws IOException {
- if (verbose()) {
- message("findForcedMerges maxSegmentCount=" + maxSegmentCount + " infos=" + writer.get().segString(infos) + " segmentsToMerge=" + segmentsToMerge);
+  public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer) throws IOException {
+ if (verbose(writer)) {
+ message("findForcedMerges maxSegmentCount=" + maxSegmentCount + " infos=" + writer.segString(infos) + " segmentsToMerge=" + segmentsToMerge, writer);
}
     List<SegmentCommitInfo> eligible = new ArrayList<>();
boolean forceMergeRunning = false;
-    final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
+    final Collection<SegmentCommitInfo> merging = writer.getMergingSegments();
boolean segmentIsOriginal = false;
for(SegmentCommitInfo info : infos) {
final Boolean isOriginal = segmentsToMerge.get(info);
@@ -518,18 +524,18 @@ public class TieredMergePolicy extends MergePolicy {
}
if ((maxSegmentCount > 1 && eligible.size() <= maxSegmentCount) ||
- (maxSegmentCount == 1 && eligible.size() == 1 && (!segmentIsOriginal || isMerged(infos, eligible.get(0))))) {
- if (verbose()) {
- message("already merged");
+ (maxSegmentCount == 1 && eligible.size() == 1 && (!segmentIsOriginal || isMerged(infos, eligible.get(0), writer)))) {
+ if (verbose(writer)) {
+ message("already merged", writer);
}
return null;
}
- Collections.sort(eligible, new SegmentByteSizeDescending());
+ Collections.sort(eligible, new SegmentByteSizeDescending(writer));
- if (verbose()) {
- message("eligible=" + eligible);
- message("forceMergeRunning=" + forceMergeRunning);
+ if (verbose(writer)) {
+ message("eligible=" + eligible, writer);
+ message("forceMergeRunning=" + forceMergeRunning, writer);
}
int end = eligible.size();
@@ -542,8 +548,8 @@ public class TieredMergePolicy extends MergePolicy {
spec = new MergeSpecification();
}
final OneMerge merge = new OneMerge(eligible.subList(end-maxMergeAtOnceExplicit, end));
- if (verbose()) {
- message("add merge=" + writer.get().segString(merge.segments));
+ if (verbose(writer)) {
+ message("add merge=" + writer.segString(merge.segments), writer);
}
spec.add(merge);
end -= maxMergeAtOnceExplicit;
@@ -553,8 +559,8 @@ public class TieredMergePolicy extends MergePolicy {
// Do final merge
final int numToMerge = end - maxSegmentCount + 1;
final OneMerge merge = new OneMerge(eligible.subList(end-numToMerge, end));
- if (verbose()) {
- message("add final merge=" + merge.segString(writer.get().getDirectory()));
+ if (verbose(writer)) {
+ message("add final merge=" + merge.segString(writer.getDirectory()), writer);
}
spec = new MergeSpecification();
spec.add(merge);
@@ -564,14 +570,14 @@ public class TieredMergePolicy extends MergePolicy {
}
@Override
- public MergeSpecification findForcedDeletesMerges(SegmentInfos infos) throws IOException {
- if (verbose()) {
- message("findForcedDeletesMerges infos=" + writer.get().segString(infos) + " forceMergeDeletesPctAllowed=" + forceMergeDeletesPctAllowed);
+ public MergeSpecification findForcedDeletesMerges(SegmentInfos infos, IndexWriter writer) throws IOException {
+ if (verbose(writer)) {
+ message("findForcedDeletesMerges infos=" + writer.segString(infos) + " forceMergeDeletesPctAllowed=" + forceMergeDeletesPctAllowed, writer);
}
     final List<SegmentCommitInfo> eligible = new ArrayList<>();
-    final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
+    final Collection<SegmentCommitInfo> merging = writer.getMergingSegments();
for(SegmentCommitInfo info : infos) {
- double pctDeletes = 100.*((double) writer.get().numDeletedDocs(info))/info.info.getDocCount();
+ double pctDeletes = 100.*((double) writer.numDeletedDocs(info))/info.info.getDocCount();
if (pctDeletes > forceMergeDeletesPctAllowed && !merging.contains(info)) {
eligible.add(info);
}
@@ -581,10 +587,10 @@ public class TieredMergePolicy extends MergePolicy {
return null;
}
- Collections.sort(eligible, new SegmentByteSizeDescending());
+ Collections.sort(eligible, new SegmentByteSizeDescending(writer));
- if (verbose()) {
- message("eligible=" + eligible);
+ if (verbose(writer)) {
+ message("eligible=" + eligible, writer);
}
int start = 0;
@@ -600,8 +606,8 @@ public class TieredMergePolicy extends MergePolicy {
}
final OneMerge merge = new OneMerge(eligible.subList(start, end));
- if (verbose()) {
- message("add merge=" + writer.get().segString(merge.segments));
+ if (verbose(writer)) {
+ message("add merge=" + writer.segString(merge.segments), writer);
}
spec.add(merge);
start = end;
@@ -618,13 +624,12 @@ public class TieredMergePolicy extends MergePolicy {
return Math.max(floorSegmentBytes, bytes);
}
- private boolean verbose() {
- final IndexWriter w = writer.get();
- return w != null && w.infoStream.isEnabled("TMP");
+ private boolean verbose(IndexWriter writer) {
+ return writer != null && writer.infoStream.isEnabled("TMP");
}
- private void message(String message) {
- writer.get().infoStream.message("TMP", message);
+ private void message(String message, IndexWriter writer) {
+ writer.infoStream.message("TMP", message);
}
@Override
diff --git a/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
index e8705904383..79faf5ad019 100644
--- a/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
@@ -69,18 +69,12 @@ public class UpgradeIndexMergePolicy extends MergePolicy {
}
@Override
- public void setIndexWriter(IndexWriter writer) {
- super.setIndexWriter(writer);
- base.setIndexWriter(writer);
+ public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
+ return base.findMerges(null, segmentInfos, writer);
}
@Override
- public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos) throws IOException {
- return base.findMerges(null, segmentInfos);
- }
-
- @Override
-  public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge) throws IOException {
+  public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer) throws IOException {
// first find all old segments
     final Map<SegmentCommitInfo,Boolean> oldSegments = new HashMap<>();
for (final SegmentCommitInfo si : segmentInfos) {
@@ -90,14 +84,14 @@ public class UpgradeIndexMergePolicy extends MergePolicy {
}
}
- if (verbose()) {
- message("findForcedMerges: segmentsToUpgrade=" + oldSegments);
+ if (verbose(writer)) {
+ message("findForcedMerges: segmentsToUpgrade=" + oldSegments, writer);
}
if (oldSegments.isEmpty())
return null;
- MergeSpecification spec = base.findForcedMerges(segmentInfos, maxSegmentCount, oldSegments);
+ MergeSpecification spec = base.findForcedMerges(segmentInfos, maxSegmentCount, oldSegments, writer);
if (spec != null) {
// remove all segments that are in merge specification from oldSegments,
@@ -109,9 +103,9 @@ public class UpgradeIndexMergePolicy extends MergePolicy {
}
if (!oldSegments.isEmpty()) {
- if (verbose()) {
+ if (verbose(writer)) {
message("findForcedMerges: " + base.getClass().getSimpleName() +
- " does not want to merge all old segments, merge remaining ones into new segment: " + oldSegments);
+ " does not want to merge all old segments, merge remaining ones into new segment: " + oldSegments, writer);
}
       final List<SegmentCommitInfo> newInfos = new ArrayList<>();
for (final SegmentCommitInfo si : segmentInfos) {
@@ -130,13 +124,13 @@ public class UpgradeIndexMergePolicy extends MergePolicy {
}
@Override
- public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos) throws IOException {
- return base.findForcedDeletesMerges(segmentInfos);
+ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
+ return base.findForcedDeletesMerges(segmentInfos, writer);
}
@Override
- public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment) throws IOException {
- return base.useCompoundFile(segments, newSegment);
+ public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment, IndexWriter writer) throws IOException {
+ return base.useCompoundFile(segments, newSegment, writer);
}
@Override
@@ -149,12 +143,11 @@ public class UpgradeIndexMergePolicy extends MergePolicy {
return "[" + getClass().getSimpleName() + "->" + base + "]";
}
- private boolean verbose() {
- final IndexWriter w = writer.get();
- return w != null && w.infoStream.isEnabled("UPGMP");
+ private boolean verbose(IndexWriter writer) {
+ return writer != null && writer.infoStream.isEnabled("UPGMP");
}
- private void message(String message) {
- writer.get().infoStream.message("UPGMP", message);
+ private void message(String message, IndexWriter writer) {
+ writer.infoStream.message("UPGMP", message);
}
}
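
Wrapper policies such as UpgradeIndexMergePolicy above (and SortingMergePolicy further
down) now simply forward the per-call writer and no longer need a setIndexWriter
override. A minimal sketch of that delegation pattern, under a hypothetical class name:

    import java.io.IOException;
    import java.util.Map;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.index.MergeTrigger;
    import org.apache.lucene.index.SegmentCommitInfo;
    import org.apache.lucene.index.SegmentInfos;

    // Hypothetical delegating policy, not part of this patch.
    public class DelegatingMergePolicy extends MergePolicy {
      private final MergePolicy in;

      public DelegatingMergePolicy(MergePolicy in) {
        this.in = in;
      }

      @Override
      public MergeSpecification findMerges(MergeTrigger mergeTrigger,
          SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
        return in.findMerges(mergeTrigger, segmentInfos, writer);
      }

      @Override
      public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
          int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge,
          IndexWriter writer) throws IOException {
        return in.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, writer);
      }

      @Override
      public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos,
          IndexWriter writer) throws IOException {
        return in.findForcedDeletesMerges(segmentInfos, writer);
      }

      @Override
      public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment,
          IndexWriter writer) throws IOException {
        return in.useCompoundFile(segments, newSegment, writer);
      }

      @Override
      public void close() {
        // Closing the wrapped policy is left to its owner in this sketch.
      }
    }
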
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
index 47b0384500d..8c1bb9f3295 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNoMergePolicy.java
@@ -30,9 +30,9 @@ public class TestNoMergePolicy extends LuceneTestCase {
@Test
public void testNoMergePolicy() throws Exception {
MergePolicy mp = NoMergePolicy.INSTANCE;
- assertNull(mp.findMerges(null, (SegmentInfos)null));
- assertNull(mp.findForcedMerges(null, 0, null));
- assertNull(mp.findForcedDeletesMerges(null));
+ assertNull(mp.findMerges(null, (SegmentInfos)null, null));
+ assertNull(mp.findForcedMerges(null, 0, null, null));
+ assertNull(mp.findForcedDeletesMerges(null, null));
mp.close();
}
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
index 8340b792f0f..a1d6dafe79f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
@@ -257,7 +257,7 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
public void close() {}
@Override
- public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos)
+ public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, IndexWriter writer)
throws IOException {
MergeSpecification ms = new MergeSpecification();
if (doMerge) {
@@ -271,19 +271,19 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
@Override
public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
-        int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge)
+        int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer)
throws IOException {
return null;
}
@Override
public MergeSpecification findForcedDeletesMerges(
- SegmentInfos segmentInfos) throws IOException {
+ SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
return null;
}
@Override
- public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment) {
+ public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment, IndexWriter writer) {
return useCompoundFile;
}
}
diff --git a/lucene/misc/src/java/org/apache/lucene/index/sorter/SortingMergePolicy.java b/lucene/misc/src/java/org/apache/lucene/index/sorter/SortingMergePolicy.java
index afc9cac3649..19735535d8e 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/sorter/SortingMergePolicy.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/sorter/SortingMergePolicy.java
@@ -186,21 +186,21 @@ public final class SortingMergePolicy extends MergePolicy {
@Override
public MergeSpecification findMerges(MergeTrigger mergeTrigger,
- SegmentInfos segmentInfos) throws IOException {
- return sortedMergeSpecification(in.findMerges(mergeTrigger, segmentInfos));
+ SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
+ return sortedMergeSpecification(in.findMerges(mergeTrigger, segmentInfos, writer));
}
@Override
public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
-      int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge)
+      int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer)
throws IOException {
- return sortedMergeSpecification(in.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge));
+ return sortedMergeSpecification(in.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, writer));
}
@Override
- public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos)
+ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, IndexWriter writer)
throws IOException {
- return sortedMergeSpecification(in.findForcedDeletesMerges(segmentInfos));
+ return sortedMergeSpecification(in.findForcedDeletesMerges(segmentInfos, writer));
}
@Override
@@ -210,13 +210,8 @@ public final class SortingMergePolicy extends MergePolicy {
@Override
public boolean useCompoundFile(SegmentInfos segments,
- SegmentCommitInfo newSegment) throws IOException {
- return in.useCompoundFile(segments, newSegment);
- }
-
- @Override
- public void setIndexWriter(IndexWriter writer) {
- in.setIndexWriter(writer);
+ SegmentCommitInfo newSegment, IndexWriter writer) throws IOException {
+ return in.useCompoundFile(segments, newSegment, writer);
}
@Override
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java
index fedf22490f6..c4ae077bd57 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/AlcoholicMergePolicy.java
@@ -54,7 +54,7 @@ public class AlcoholicMergePolicy extends LogMergePolicy {
@Override
//@BlackMagic(level=Voodoo);
- protected long size(SegmentCommitInfo info) throws IOException {
+ protected long size(SegmentCommitInfo info, IndexWriter writer) throws IOException {
int hourOfDay = calendar.get(Calendar.HOUR_OF_DAY);
if (hourOfDay < 6 ||
hourOfDay > 20 ||
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
index 2e851aa5174..e577b99993c 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
@@ -49,14 +49,14 @@ public class MockRandomMergePolicy extends MergePolicy {
}
@Override
- public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos) {
+ public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, IndexWriter writer) {
MergeSpecification mergeSpec = null;
//System.out.println("MRMP: findMerges sis=" + segmentInfos);
int numSegments = segmentInfos.size();
     List<SegmentCommitInfo> segments = new ArrayList<>();
-    final Collection<SegmentCommitInfo> merging = writer.get().getMergingSegments();
+    final Collection<SegmentCommitInfo> merging = writer.getMergingSegments();
for(SegmentCommitInfo sipc : segmentInfos) {
if (!merging.contains(sipc)) {
@@ -85,7 +85,7 @@ public class MockRandomMergePolicy extends MergePolicy {
@Override
public MergeSpecification findForcedMerges(
-      SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge)
+      SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer)
throws IOException {
     final List<SegmentCommitInfo> eligibleSegments = new ArrayList<>();
@@ -126,8 +126,8 @@ public class MockRandomMergePolicy extends MergePolicy {
}
@Override
- public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos) throws IOException {
- return findMerges(null, segmentInfos);
+ public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, IndexWriter writer) throws IOException {
+ return findMerges(null, segmentInfos, writer);
}
@Override
@@ -135,7 +135,7 @@ public class MockRandomMergePolicy extends MergePolicy {
}
@Override
- public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo) throws IOException {
+ public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, IndexWriter writer) throws IOException {
// 80% of the time we create CFS:
return random.nextInt(5) != 1;
}