HDFS-4075. Reduce recommissioning overhead (Kihwal Lee via daryn)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1406278 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent b68bd472dc
commit 54b70db347
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1940,6 +1940,8 @@ Release 0.23.5 - UNRELEASED
 
   OPTIMIZATIONS
 
+    HDFS-4075. Reduce recommissioning overhead (Kihwal Lee via daryn)
+
   BUG FIXES
 
     HDFS-3829. TestHftpURLTimeouts fails intermittently with JDK7 (Trevor
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -2696,6 +2696,7 @@ private void logBlockReplicationInfo(Block block, DatanodeDescriptor srcNode,
   void processOverReplicatedBlocksOnReCommission(
       final DatanodeDescriptor srcNode) {
     final Iterator<? extends Block> it = srcNode.getBlockIterator();
+    int numOverReplicated = 0;
     while(it.hasNext()) {
       final Block block = it.next();
       BlockCollection bc = blocksMap.getBlockCollection(block);
@@ -2705,8 +2706,11 @@ void processOverReplicatedBlocksOnReCommission(
       if (numCurrentReplica > expectedReplication) {
         // over-replicated block
         processOverReplicatedBlock(block, expectedReplication, null, null);
+        numOverReplicated++;
       }
     }
+    LOG.info("Invalidated " + numOverReplicated + " over-replicated blocks on " +
+        srcNode + " during recommissioning");
   }
 
   /**
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
@@ -608,7 +608,11 @@ void stopDecommission(DatanodeDescriptor node) {
     if (node.isDecommissionInProgress() || node.isDecommissioned()) {
       LOG.info("Stop Decommissioning " + node);
       heartbeatManager.stopDecommission(node);
-      blockManager.processOverReplicatedBlocksOnReCommission(node);
+      // Over-replicated blocks will be detected and processed when
+      // the dead node comes back and send in its full block report.
+      if (node.isAlive) {
+        blockManager.processOverReplicatedBlocksOnReCommission(node);
+      }
     }
   }
 
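Taken together, the change only rescans a recommissioned node for over-replicated blocks when that node is alive, and counts how many blocks the scan invalidates. A minimal sketch of that control flow is below; the Node and BlockManager types are hypothetical stand-ins for illustration, not the real HDFS classes.

// Hypothetical simplification of the recommission path touched by this commit.
class RecommissionSketch {
  interface Node {
    boolean isAlive();
  }

  interface BlockManager {
    // Rescans the node's blocks and returns how many excess replicas it invalidated.
    int processOverReplicatedBlocksOnReCommission(Node node);
  }

  static void stopDecommission(Node node, BlockManager blockManager) {
    if (node.isAlive()) {
      // Live node: invalidate excess replicas immediately.
      int invalidated = blockManager.processOverReplicatedBlocksOnReCommission(node);
      System.out.println("Invalidated " + invalidated
          + " over-replicated blocks during recommissioning");
    }
    // Dead node: skip the scan; excess replicas are found later, when the node
    // re-registers and sends its full block report.
  }
}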