diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java
index cd59f51528..3ead793542 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java
@@ -135,9 +135,16 @@ private void reconstructTargets(int toReconstructLen) throws IOException {
       resetBuffers(inputs);
 
       DataNodeFaultInjector.get().badDecoding(outputs);
+      long start = Time.monotonicNow();
       try {
         getValidator().validate(inputs, erasedIndices, outputs);
+        long validateEnd = Time.monotonicNow();
+        getDatanode().getMetrics().incrECReconstructionValidateTime(
+            validateEnd - start);
       } catch (InvalidDecodingException e) {
+        long validateFailedEnd = Time.monotonicNow();
+        getDatanode().getMetrics().incrECReconstructionValidateTime(
+            validateFailedEnd - start);
         getDatanode().getMetrics().incrECInvalidReconstructionTasks();
         throw e;
       }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
index 2debc3ee0a..5203d7bf87 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
@@ -168,6 +168,8 @@ public class DataNodeMetrics {
   private MutableCounterLong ecReconstructionDecodingTimeMillis;
   @Metric("Milliseconds spent on write by erasure coding worker")
   private MutableCounterLong ecReconstructionWriteTimeMillis;
+  @Metric("Milliseconds spent on validating by erasure coding worker")
+  private MutableCounterLong ecReconstructionValidateTimeMillis;
   @Metric("Sum of all BPServiceActors command queue length")
   private MutableCounterLong sumOfActorCommandQueueLength;
   @Metric("Num of processed commands of all BPServiceActors")
@@ -629,6 +631,10 @@ public void incrECReconstructionDecodingTime(long millis) {
     ecReconstructionDecodingTimeMillis.incr(millis);
   }
 
+  public void incrECReconstructionValidateTime(long millis) {
+    ecReconstructionValidateTimeMillis.incr(millis);
+  }
+
   public DataNodeUsageReport getDNUsageReport(long timeSinceLastReport) {
     return dnUsageReportUtil.getUsageReport(bytesWritten.value(), bytesRead
         .value(), totalWriteTime.value(), totalReadTime.value(),
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestReconstructStripedBlocks.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestReconstructStripedBlocks.java
index 94a9ae1a4e..fc307bf84d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestReconstructStripedBlocks.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestReconstructStripedBlocks.java
@@ -364,7 +364,7 @@ public void testCountLiveReplicas() throws Exception {
     }
   }
 
-  @Test(timeout=120000) // 1 min timeout
+  @Test(timeout=120000) // 2 min timeout
   public void testReconstructionWork() throws Exception {
     Configuration conf = new HdfsConfiguration();
     conf.setLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, 0);
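Reviewer note (not part of the patch): a minimal sketch of how the new counter could be read back from a MiniDFSCluster test, assuming the @Metric field ecReconstructionValidateTimeMillis is published under the capitalized name EcReconstructionValidateTimeMillis, following the convention of the existing EC counters such as EcReconstructionDecodingTimeMillis.

import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;

/** Hypothetical helper, not in the patch: sums the validate-time counter across DataNodes. */
final class EcValidateMetricCheck {
  private EcValidateMetricCheck() {
  }

  static long totalValidateTimeMillis(MiniDFSCluster cluster) {
    long total = 0;
    for (DataNode dn : cluster.getDataNodes()) {
      // MetricsAsserts looks up the registered DataNodeMetrics record by its name.
      MetricsRecordBuilder rb = getMetrics(dn.getMetrics().name());
      // Assumed registered metric name; adjust if the metrics2 registration differs.
      total += getLongCounter("EcReconstructionValidateTimeMillis", rb);
    }
    return total;
  }
}

A test that forces an EC reconstruction could then assert that totalValidateTimeMillis(cluster) is non-negative, or strictly positive once validation is enabled and a block has actually been rebuilt.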