diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java index f97dbbfcd6..bf88e6fe88 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java @@ -406,6 +406,7 @@ public void run() { } try { reconcile(); + dataset.setLastDirScannerFinishTime(System.currentTimeMillis()); } catch (Exception e) { // Log and continue - allows Executor to run again next cycle LOG.error( diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java index 4cad7aa4d3..4ab7e1be84 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java @@ -692,4 +692,10 @@ ReplicaInfo moveBlockAcrossVolumes(final ExtendedBlock block, * Get the volume list. */ List getVolumeList(); + + /** + * Set the last time in milliseconds when the directory scanner successfully ran. + * @param time the last time in milliseconds when the directory scanner successfully ran. 
+ */ + default void setLastDirScannerFinishTime(long time) {} } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java index d81b5411c5..f1115efcc2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java @@ -284,7 +284,8 @@ public LengthInputStream getMetaDataInputStream(ExtendedBlock b) private long maxDirScannerNotifyCount; private long curDirScannerNotifyCount; private long lastDirScannerNotifyTime; - + private volatile long lastDirScannerFinishTime; + /** * An FSDataset has a directory where it loads its data files. */ @@ -3811,5 +3812,15 @@ void stopAllDataxceiverThreads(FsVolumeImpl volume) { public List getVolumeList() { return volumes.getVolumes(); } + + @Override + public long getLastDirScannerFinishTime() { + return this.lastDirScannerFinishTime; + } + + @Override + public void setLastDirScannerFinishTime(long time) { + this.lastDirScannerFinishTime = time; + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java index b1a2d4f956..65fd92ec78 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetricHelper.java @@ -69,10 +69,11 @@ public static void getMetrics(MetricsCollector collector, " of blocks cached"), beanClass.getNumBlocksCached()) 
.addGauge(Interns.info("NumBlocksFailedToCache", "Datanode number of " + "blocks failed to cache"), beanClass.getNumBlocksFailedToCache()) - .addGauge(Interns.info("NumBlocksFailedToUnCache", "Datanode number of" + + .addGauge(Interns.info("NumBlocksFailedToUnCache", "Datanode number of" + " blocks failed in cache eviction"), - beanClass.getNumBlocksFailedToUncache()); - + beanClass.getNumBlocksFailedToUncache()) + .addGauge(Interns.info("LastDirectoryScannerFinishTime", + "Finish time of the last directory scan"), beanClass.getLastDirScannerFinishTime()); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java index c2f175b97d..0bfb14e232 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java @@ -122,4 +122,9 @@ public interface FSDatasetMBean extends MetricsSource { * Returns the number of blocks that the datanode was unable to uncache */ public long getNumBlocksFailedToUncache(); + + /** + * Returns the last time in milliseconds when the directory scanner successfully ran. 
+ */ + long getLastDirScannerFinishTime(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java index 1d479f34e0..8c75ca9f75 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java @@ -939,6 +939,11 @@ public long getNumBlocksFailedToUncache() { return 0L; } + @Override + public long getLastDirScannerFinishTime() { + return 0L; + } + /** * Get metrics from the metrics source * @@ -1632,5 +1637,10 @@ public MountVolumeMap getMountVolumeMap() { public List getVolumeList() { return null; } + + @Override + public void setLastDirScannerFinishTime(long time) { + throw new UnsupportedOperationException(); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java index 74c70cec76..244b60e138 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java @@ -23,6 +23,7 @@ import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -1304,6 +1305,24 @@ public void testLocalReplicaUpdateWithReplica() throws Exception { assertEquals(realBlkFile, localReplica.getBlockFile()); } + @Test(timeout = 60000) + public void 
testLastDirScannerFinishTimeIsUpdated() throws Exception { + Configuration conf = getConfiguration(); + conf.setLong(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 3L); + cluster = new MiniDFSCluster.Builder(conf).build(); + try { + cluster.waitActive(); + bpid = cluster.getNamesystem().getBlockPoolId(); + final DataNode dn = cluster.getDataNodes().get(0); + fds = DataNodeTestUtils.getFSDataset(cluster.getDataNodes().get(0)); + long lastDirScannerFinishTime = fds.getLastDirScannerFinishTime(); + dn.getDirectoryScanner().run(); + assertNotEquals(lastDirScannerFinishTime, fds.getLastDirScannerFinishTime()); + } finally { + cluster.shutdown(); + } + } + public long getRandomBlockId() { return Math.abs(new Random().nextLong()); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/extdataset/ExternalDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/extdataset/ExternalDatasetImpl.java index 413a2e6b59..1bd42e0bdf 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/extdataset/ExternalDatasetImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/extdataset/ExternalDatasetImpl.java @@ -477,4 +477,10 @@ public MountVolumeMap getMountVolumeMap() { public List getVolumeList() { return null; } + + @Override + public long getLastDirScannerFinishTime() { + return 0L; + } + } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java index 0ee7eb3ec1..1c94e351c2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java @@ -250,6 +250,13 @@ public void checkDataSetLockManager() { assertNull(manager.getLastException()); } + @Test + public void testSetLastDirScannerFinishTime() throws IOException { + assertEquals(0L, dataset.getLastDirScannerFinishTime()); + dataset.setLastDirScannerFinishTime(System.currentTimeMillis()); + assertNotEquals(0L, dataset.getLastDirScannerFinishTime()); + } + @Test public void testAddVolumes() throws IOException { final int numNewVolumes = 3;