From 112c32415175f637a2791f2207c20393fc9ba740 Mon Sep 17 00:00:00 2001
From: Eli Collins
Date: Fri, 6 Apr 2012 00:10:54 +0000
Subject: [PATCH] HDFS-3210. JsonUtil#toJsonMap for a DatanodeInfo should use
 "ipAddr" instead of "name". Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1310135 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt          |  3 +++
 .../apache/hadoop/hdfs/protocol/DatanodeInfo.java    |  4 ++--
 .../java/org/apache/hadoop/hdfs/web/JsonUtil.java    |  2 +-
 .../hdfs/web/TestWebHdfsFileSystemContract.java      | 15 ++++++++++++++-
 4 files changed, 20 insertions(+), 4 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index df85179ccc..d250b9a2f0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -430,6 +430,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-3109. Remove hsqldf exclusions from pom.xml.
     (Ravi Prakash via suresh)
 
+    HDFS-3210. JsonUtil#toJsonMap for a DatanodeInfo should use
+    "ipAddr" instead of "name". (eli)
+
   BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
index 22e0851f05..963e535b99 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
@@ -122,12 +122,12 @@ public DatanodeInfo(DatanodeID nodeID, String location,
   }
 
   /** Constructor */
-  public DatanodeInfo(final String name, final String hostName,
+  public DatanodeInfo(final String ipAddr, final String hostName,
       final String storageID, final int xferPort, final int infoPort,
       final int ipcPort, final long capacity, final long dfsUsed,
       final long remaining, final long blockPoolUsed, final long lastUpdate,
       final int xceiverCount, final String networkLocation,
       final AdminStates adminState) {
-    super(name, hostName, storageID, xferPort, infoPort, ipcPort);
+    super(ipAddr, hostName, storageID, xferPort, infoPort, ipcPort);
     this.capacity = capacity;
     this.dfsUsed = dfsUsed;
     this.remaining = remaining;

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index 45c4445e84..9218078a48 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -305,7 +305,7 @@ private static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) {
     }
 
     return new DatanodeInfo(
-        (String)m.get("name"),
+        (String)m.get("ipAddr"),
         (String)m.get("hostName"),
         (String)m.get("storageID"),
         (int)(long)(Long)m.get("xferPort"),

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
index a216713ac5..04ffd10b35 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
@@ -24,6 +24,7 @@
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.util.Arrays;
 import java.util.Map;
 
 import javax.servlet.http.HttpServletResponse;
@@ -133,8 +134,20 @@ public void testGetFileBlockLocations() throws IOException {
     final BlockLocation[] expected = cluster.getFileSystem().getFileBlockLocations(
         new Path(f), 0L, 1L);
     assertEquals(expected.length, computed.length);
-    for(int i = 0; i < computed.length; i++) {
+    for (int i = 0; i < computed.length; i++) {
       assertEquals(expected[i].toString(), computed[i].toString());
+      // Check names
+      String names1[] = expected[i].getNames();
+      String names2[] = computed[i].getNames();
+      Arrays.sort(names1);
+      Arrays.sort(names2);
+      Assert.assertArrayEquals("Names differ", names1, names2);
+      // Check topology
+      String topos1[] = expected[i].getTopologyPaths();
+      String topos2[] = computed[i].getTopologyPaths();
+      Arrays.sort(topos1);
+      Arrays.sort(topos2);
+      Assert.assertArrayEquals("Topology differs", topos1, topos2);
     }
   }
 
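For readers following along, the bug this patch fixes is a key mismatch in the WebHDFS JSON round trip: per the commit subject, the JSON map for a DatanodeInfo keys the datanode's address as "ipAddr" (matching the DatanodeID rename from "name" to "ipAddr"), but toDatanodeInfo still looked up the old key "name", so the address came back null. The test sorts getNames() and getTopologyPaths() before comparing, presumably because replica ordering is not guaranteed across the two code paths. A minimal self-contained sketch of the failure mode follows; it is hypothetical stand-in code, not the Hadoop implementation (which lives in org.apache.hadoop.hdfs.web.JsonUtil), and the class and method names below are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class DatanodeJsonRoundTrip {

  // Serializer side: keys the address as "ipAddr" (the behavior this
  // patch assumes JsonUtil#toJsonMap already has).
  static Map<String, Object> toJsonMap(String ipAddr, String hostName) {
    final Map<String, Object> m = new HashMap<String, Object>();
    m.put("ipAddr", ipAddr);
    m.put("hostName", hostName);
    return m;
  }

  // Deserializer side: must read the same key. Before the patch the
  // equivalent lookup was m.get("name"), which returned null and
  // silently dropped the address on the way back in.
  static String toIpAddr(Map<?, ?> m) {
    return (String) m.get("ipAddr");
  }

  public static void main(String[] args) {
    Map<String, Object> m = toJsonMap("10.0.0.1", "dn1.example.com");
    String ipAddr = toIpAddr(m);
    if (!"10.0.0.1".equals(ipAddr)) {
      throw new AssertionError("ipAddr lost in JSON round trip: " + ipAddr);
    }
    System.out.println("round-tripped ipAddr = " + ipAddr);
  }
}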