mirror of https://github.com/apache/lucene.git
Never throttle creation of compound files. (#12070)
`ConcurrentMergeScheduler` uses the rate at which a merge writes bytes as a proxy for CPU usage, in order to prevent merging from disrupting searches too much. However, creating compound files is lightweight CPU-wise and does not need throttling. Closes #12068
This commit is contained in:
parent
56ec51e558
commit
a8ef03d979
|
@ -199,6 +199,9 @@ Improvements
|
|||
|
||||
* GITHUB#12034: Remove null check in IndexReaderContext#leaves() usages (Erik Pellizzon)
|
||||
|
||||
* GITHUB#12070: Compound file creation is no longer subject to merge throttling.
|
||||
(Adrien Grand)
|
||||
|
||||
Bug Fixes
|
||||
---------------------
|
||||
* GITHUB#11726: Indexing term vectors on large documents could fail due to
|
||||
|
|
|
@ -5210,7 +5210,10 @@ public class IndexWriter
|
|||
success = false;
|
||||
|
||||
Collection<String> filesToRemove = merge.info.files();
|
||||
TrackingDirectoryWrapper trackingCFSDir = new TrackingDirectoryWrapper(mergeDirectory);
|
||||
// NOTE: Creation of the CFS file must be performed with the original
|
||||
// directory rather than with the merging directory, so that it is not
|
||||
// subject to merge throttling.
|
||||
TrackingDirectoryWrapper trackingCFSDir = new TrackingDirectoryWrapper(directory);
|
||||
try {
|
||||
createCompoundFile(
|
||||
infoStream, trackingCFSDir, merge.info.info, context, this::deleteNewFiles);
|
||||
|
|
Loading…
Reference in New Issue