diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockReconstructionWork.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockReconstructionWork.java
index d383191584..df76a15c73 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockReconstructionWork.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockReconstructionWork.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import org.apache.hadoop.net.Node;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Collections;
 import java.util.List;
@@ -30,6 +32,10 @@
  * Reconstruction is done by transferring data from srcNodes to targets
  */
 abstract class BlockReconstructionWork {
+
+  public static final Logger LOG =
+      LoggerFactory.getLogger(BlockReconstructionWork.class);
+
   private final BlockInfo block;
 
   private final String srcPath;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ErasureCodingWork.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ErasureCodingWork.java
index 147f8cfe6e..f0e6d49ba1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ErasureCodingWork.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ErasureCodingWork.java
@@ -44,7 +44,7 @@ public ErasureCodingWork(String blockPoolId, BlockInfo block,
         liveReplicaStorages, additionalReplRequired, priority);
     this.blockPoolId = blockPoolId;
     this.liveBlockIndicies = liveBlockIndicies;
-    BlockManager.LOG.debug("Creating an ErasureCodingWork to {} reconstruct ",
+    LOG.debug("Creating an ErasureCodingWork to {} reconstruct ",
         block);
   }
 
@@ -157,10 +157,8 @@ private void createReplicationWork(int sourceIndex,
         internBlkLen, stripedBlk.getGenerationStamp());
     source.addBlockToBeReplicated(targetBlk,
         new DatanodeStorageInfo[] {target});
-    if (BlockManager.LOG.isDebugEnabled()) {
-      BlockManager.LOG.debug("Add replication task from source {} to "
-          + "target {} for EC block {}", source, target, targetBlk);
-    }
+    LOG.debug("Add replication task from source {} to "
+        + "target {} for EC block {}", source, target, targetBlk);
   }
 
   private List findLeavingServiceSources() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ReplicationWork.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ReplicationWork.java
index f250bcbaa4..5e10ebeaa0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ReplicationWork.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ReplicationWork.java
@@ -32,8 +32,7 @@ public ReplicationWork(BlockInfo block, BlockCollection bc,
     assert getSrcNodes().length == 1 :
         "There should be exactly 1 source node that have been selected";
     getSrcNodes()[0].incrementPendingReplicationWithoutTargets();
-    BlockManager.LOG
-        .debug("Creating a ReplicationWork to reconstruct " + block);
+    LOG.debug("Creating a ReplicationWork to reconstruct " + block);
   }
 
   @Override
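The hunks above move the reconstruction-work classes from the shared BlockManager.LOG to a class-local LOG in BlockReconstructionWork and drop the explicit isDebugEnabled() guard. The standalone sketch below (not part of the patch; the class and variable names are hypothetical) illustrates why the guard becomes redundant: SLF4J's {} placeholders defer message formatting until the logger has checked the level itself.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingExample {

  // One class-local logger, mirroring the LOG field the patch adds so
  // subclasses no longer route their messages through BlockManager.LOG.
  private static final Logger LOG =
      LoggerFactory.getLogger(ParameterizedLoggingExample.class);

  public static void main(String[] args) {
    String source = "dn-1";
    String target = "dn-2";
    String block = "blk_12345";

    // Old pattern: guard plus string concatenation. The guard exists only to
    // avoid building the message string when DEBUG is disabled.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Add replication task from source " + source + " to target "
          + target + " for block " + block);
    }

    // New pattern: {} placeholders are substituted only after SLF4J has
    // confirmed DEBUG is enabled, so no explicit guard is needed.
    LOG.debug("Add replication task from source {} to target {} for block {}",
        source, target, block);
  }
}

A guard is still worthwhile when computing a log argument is itself expensive; here the arguments are existing objects, so the parameterized call alone is enough.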