diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
index acf302e9a6..e47e22e509 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
@@ -634,9 +634,9 @@ private void collectBlocksSummary(String parent, HdfsFileStatus file, Result res
       report.append(" Live_repl=" + liveReplicas);
       if (showLocations || showRacks || showReplicaDetails) {
         StringBuilder sb = new StringBuilder("[");
-        Iterable<DatanodeStorageInfo> storages = bm.getStorages(block.getLocalBlock());
-        for (Iterator<DatanodeStorageInfo> iterator = storages.iterator(); iterator.hasNext();) {
-          DatanodeStorageInfo storage = iterator.next();
+        DatanodeStorageInfo[] storages = bm.getStorages(storedBlock);
+        for (int i = 0; i < storages.length; i++) {
+          DatanodeStorageInfo storage = storages[i];
           DatanodeDescriptor dnDesc = storage.getDatanodeDescriptor();
           if (showRacks) {
             sb.append(NodeBase.getPath(dnDesc));
@@ -645,7 +645,7 @@ private void collectBlocksSummary(String parent, HdfsFileStatus file, Result res
                 .getStorageType()));
           }
           if (showReplicaDetails) {
-            LightWeightLinkedSet<Block> blocksExcess =
+            LightWeightLinkedSet<BlockInfo> blocksExcess =
                 bm.excessReplicateMap.get(dnDesc.getDatanodeUuid());
             Collection<DatanodeDescriptor> corruptReplicas =
                 bm.getCorruptReplicas(block.getLocalBlock());
@@ -666,7 +666,7 @@ private void collectBlocksSummary(String parent, HdfsFileStatus file, Result res
              sb.append("LIVE)");
            }
          }
-          if (iterator.hasNext()) {
+          if (i < storages.length - 1) {
            sb.append(", ");
          }
        }