diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 99584d9f3c3..2d9ccf916f2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -68,6 +68,7 @@ public class DirectoryScanner implements Runnable {
       + " starting at %s with interval of %dms";
   private static final String START_MESSAGE_WITH_THROTTLE = START_MESSAGE
       + " and throttle limit of %dms/s";
+  private static final int RECONCILE_BLOCKS_BATCH_SIZE = 1000;
 
   private final FsDatasetSpi<?> dataset;
   private final ExecutorService reportCompileThreadPool;
@@ -373,7 +374,11 @@ public class DirectoryScanner implements Runnable {
    */
   @VisibleForTesting
   public void reconcile() throws IOException {
+    LOG.debug("reconcile start DirectoryScanning");
     scan();
+    // HDFS-14476: run checkAndUpdate in batches so the dataset lock is not
+    // held continuously for the entire diff.
+    int loopCount = 0;
     for (Entry<String, LinkedList<ScanInfo>> entry : diffs.entrySet()) {
       String bpid = entry.getKey();
       LinkedList<ScanInfo> diff = entry.getValue();
@@ -381,6 +386,16 @@ public class DirectoryScanner implements Runnable {
       for (ScanInfo info : diff) {
         dataset.checkAndUpdate(bpid, info);
+
+        // Pause after every RECONCILE_BLOCKS_BATCH_SIZE blocks (pre-increment
+        // avoids sleeping before the first block) so other waiters on the
+        // dataset lock can make progress between batches.
+        if (++loopCount % RECONCILE_BLOCKS_BATCH_SIZE == 0) {
+          try {
+            Thread.sleep(2000);
+          } catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // preserve interrupt status
+          }
+        }
       }
     }
     if (!retainDiffs) clear();