diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 445e021204..aa5cdb8fee 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -66,7 +66,7 @@ public class DirectoryScanner implements Runnable {
       LoggerFactory.getLogger(DirectoryScanner.class);
 
   private static final int DEFAULT_MAP_SIZE = 32768;
-
+  private static final int RECONCILE_BLOCKS_BATCH_SIZE = 1000;
   private final FsDatasetSpi dataset;
   private final ExecutorService reportCompileThreadPool;
   private final ScheduledExecutorService masterThread;
@@ -424,10 +424,23 @@ void shutdown() {
    */
   @VisibleForTesting
   public void reconcile() throws IOException {
+    LOG.debug("reconcile start DirectoryScanning");
     scan();
+
+    // HDFS-14476: run checkAndUpadte with batch to avoid holding the lock too
+    // long
+    int loopCount = 0;
     for (final Map.Entry entry : diffs.getEntries()) {
       dataset.checkAndUpdate(entry.getKey(), entry.getValue());
+
+      if (loopCount % RECONCILE_BLOCKS_BATCH_SIZE == 0) {
+        try {
+          Thread.sleep(2000);
+        } catch (InterruptedException e) {
+          // do nothing
+        }
+      }
+      loopCount++;
     }
     if (!retainDiffs) {
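
For context, the pattern the patch applies is batching with a periodic pause: process a fixed number of entries, then sleep so other threads can take the shared lock between batches. Below is a minimal standalone sketch of that pattern; the class, method, and constant names (BatchThrottleExample, processWithBackoff, work, BATCH_SIZE, SLEEP_MS) are illustrative and not part of the patch, and the interrupt handling here restores the interrupt flag rather than ignoring it.

```java
import java.util.List;

public class BatchThrottleExample {
  // Hypothetical tuning knobs, analogous to RECONCILE_BLOCKS_BATCH_SIZE and
  // the hard-coded 2000 ms sleep in the patch.
  private static final int BATCH_SIZE = 1000;
  private static final long SLEEP_MS = 2000;

  static void processWithBackoff(List<String> items) {
    int count = 0;
    for (String item : items) {
      work(item);
      count++;
      // After every BATCH_SIZE items, pause so a long run does not
      // monopolize a shared resource (in the patch: the dataset lock).
      if (count % BATCH_SIZE == 0) {
        try {
          Thread.sleep(SLEEP_MS);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          return;
        }
      }
    }
  }

  private static void work(String item) {
    // Placeholder for per-item work, e.g. the checkAndUpdate call above.
  }
}
```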