HDFS-6313. WebHdfs may use the wrong NN when configured for multiple HA NNs. Contributed by Kihwal Lee.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1593475 13f79535-47bb-0310-9956-ffa450edef68
commit 5323d5e388
parent 87215cb9f6
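Background for the change: before this patch, WebHdfsFileSystem.resolveNNAddr() collected the NameNode HTTP addresses of every HA nameservice in the client configuration, not just the nameservice named by the webhdfs:// URI, so a client configured for more than one HA cluster could fail over to a NameNode of the wrong cluster. The sketch below shows the kind of client-side configuration that exposes the problem; the nameservice names, hostnames, and ports are illustrative and not taken from the patch.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class MultiNameserviceWebHdfsSketch {
  public static void main(String[] args) throws Exception {
    // Two HA nameservices known to the client: "ns1" (the cluster we want)
    // and "ns2" (an unrelated remote cluster). All values are made up.
    Configuration conf = new Configuration();
    conf.set("dfs.nameservices", "ns1,ns2");

    conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
    conf.set("dfs.namenode.http-address.ns1.nn1", "nn1.ns1.example.com:50070");
    conf.set("dfs.namenode.http-address.ns1.nn2", "nn2.ns1.example.com:50070");

    conf.set("dfs.ha.namenodes.ns2", "nn1,nn2");
    conf.set("dfs.namenode.http-address.ns2.nn1", "nn1.ns2.example.com:50070");
    conf.set("dfs.namenode.http-address.ns2.nn2", "nn2.ns2.example.com:50070");

    // Without the fix, the WebHdfs client for ns1 resolved the NameNode
    // addresses of both nameservices and could pick one belonging to ns2.
    FileSystem fs = FileSystem.get(URI.create("webhdfs://ns1"), conf);
    System.out.println(fs.getUri());
  }
}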
@@ -501,6 +501,9 @@ Release 2.4.1 - UNRELEASED
     HDFS-6329. WebHdfs does not work if HA is enabled on NN but logical URI is
     not configured. (kihwal)
 
+    HDFS-6313. WebHdfs may use the wrong NN when configured for multiple HA NNs
+    (kihwal)
+
 Release 2.4.0 - 2014-04-07
 
   INCOMPATIBLE CHANGES
@@ -100,6 +100,7 @@
 import org.apache.hadoop.util.Progressable;
 import org.mortbay.util.ajax.JSON;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 
@@ -1112,12 +1113,12 @@ private InetSocketAddress[] resolveNNAddr() throws IOException {
     Map<String, Map<String, InetSocketAddress>> addresses = DFSUtil
         .getHaNnWebHdfsAddresses(conf, scheme);
 
-    for (Map<String, InetSocketAddress> addrs : addresses.values()) {
-      for (InetSocketAddress addr : addrs.values()) {
-        ret.add(addr);
-      }
-    }
+    // Extract the entry corresponding to the logical name.
+    Map<String, InetSocketAddress> addrs = addresses.get(uri.getHost());
+    for (InetSocketAddress addr : addrs.values()) {
+      ret.add(addr);
+    }
 
     InetSocketAddress[] r = new InetSocketAddress[ret.size()];
     return ret.toArray(r);
   }
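The heart of the fix is in the hunk above: instead of flattening the address maps of every nameservice, resolveNNAddr() now looks up only the entry keyed by the logical host taken from the URI. Below is a standalone sketch of that selection logic, not the actual WebHdfsFileSystem code; the class and method names are invented for illustration.

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class LogicalNameSelectionSketch {
  /**
   * Given a per-nameservice map of the shape DFSUtil.getHaNnWebHdfsAddresses()
   * returns (nameservice id -> namenode id -> address), keep only the
   * NameNodes of the nameservice named by the URI host.
   */
  static InetSocketAddress[] selectForLogicalName(
      Map<String, Map<String, InetSocketAddress>> addresses,
      String logicalHost) {
    List<InetSocketAddress> ret = new ArrayList<InetSocketAddress>();
    Map<String, InetSocketAddress> addrs = addresses.get(logicalHost);
    if (addrs != null) {  // unlike the patch itself, guard against a miss
      ret.addAll(addrs.values());
    }
    return ret.toArray(new InetSocketAddress[ret.size()]);
  }
}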
@@ -1128,4 +1129,9 @@ public String getCanonicalServiceName() {
     return tokenServiceName == null ? super.getCanonicalServiceName()
         : tokenServiceName.toString();
   }
+
+  @VisibleForTesting
+  InetSocketAddress[] getResolvedNNAddr() {
+    return nnAddrs;
+  }
 }
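The @VisibleForTesting accessor added above only exposes the already-resolved nnAddrs array, so the new test further down can assert on how many NameNode addresses were resolved without widening the public API of WebHdfsFileSystem.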
@@ -148,16 +148,40 @@ public static void formatNameNode(Configuration conf) throws IOException {
    */
   public static Configuration newHAConfiguration(final String logicalName) {
     Configuration conf = new Configuration();
+    addHAConfiguration(conf, logicalName);
+    return conf;
+  }
+
+  /**
+   * Add a new HA configuration.
+   */
+  public static void addHAConfiguration(Configuration conf,
+      final String logicalName) {
+    String nsIds = conf.get(DFSConfigKeys.DFS_NAMESERVICES);
+    if (nsIds == null) {
       conf.set(DFSConfigKeys.DFS_NAMESERVICES, logicalName);
+    } else { // append the nsid
+      conf.set(DFSConfigKeys.DFS_NAMESERVICES, nsIds + "," + logicalName);
+    }
     conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX,
         logicalName), "nn1,nn2");
     conf.set(DFSConfigKeys.DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "" +
         "." + logicalName,
         ConfiguredFailoverProxyProvider.class.getName());
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
-    return conf;
   }
+
+  public static void setFakeHttpAddresses(Configuration conf,
+      final String logicalName) {
+    conf.set(DFSUtil.addKeySuffixes(
+        DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
+        logicalName, "nn1"), "127.0.0.1:12345");
+    conf.set(DFSUtil.addKeySuffixes(
+        DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
+        logicalName, "nn2"), "127.0.0.1:12346");
+  }
+
 
   /** class MyFile contains enough information to recreate the contents of
    * a single file.
    */
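The refactored newHAConfiguration() now delegates to the new addHAConfiguration(), which appends the logical name to an existing dfs.nameservices value instead of overwriting it, and setFakeHttpAddresses() points the second nameservice's two NameNodes at loopback ports so no second cluster has to be started. A usage sketch with hypothetical nameservice names, mirroring the test below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSTestUtil;

public class HAConfigurationSketch {
  public static void main(String[] args) {
    // First nameservice: dfs.nameservices becomes "ns1".
    Configuration conf = DFSTestUtil.newHAConfiguration("ns1");
    // A second, unrelated nameservice is appended: "ns1,ns1remote".
    DFSTestUtil.addHAConfiguration(conf, "ns1remote");
    // Its two NameNodes get dummy HTTP addresses (127.0.0.1:12345 / 12346).
    DFSTestUtil.setFakeHttpAddresses(conf, "ns1remote");
    System.out.println(conf.get("dfs.nameservices"));  // ns1,ns1remote
  }
}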
@@ -156,4 +156,30 @@ public void testFailoverAfterOpen() throws IOException {
       }
     }
   }
+
+  @Test
+  public void testMultipleNamespacesConfigured() throws Exception {
+    Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
+    MiniDFSCluster cluster = null;
+    WebHdfsFileSystem fs = null;
+
+    try {
+      cluster = new MiniDFSCluster.Builder(conf).nnTopology(topo)
+              .numDataNodes(1).build();
+
+      HATestUtil.setFailoverConfigurations(cluster, conf, LOGICAL_NAME);
+
+      cluster.waitActive();
+      DFSTestUtil.addHAConfiguration(conf, LOGICAL_NAME + "remote");
+      DFSTestUtil.setFakeHttpAddresses(conf, LOGICAL_NAME + "remote");
+
+      fs = (WebHdfsFileSystem)FileSystem.get(WEBHDFS_URI, conf);
+      Assert.assertEquals(2, fs.getResolvedNNAddr().length);
+    } finally {
+      IOUtils.cleanup(null, fs);
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
 }
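The new test ties the pieces above together: a MiniDFSCluster backs LOGICAL_NAME, a second nameservice (LOGICAL_NAME + "remote") with fake HTTP addresses is added to the same client configuration, and the assertion checks that the WebHdfsFileSystem created for WEBHDFS_URI resolved exactly the two NameNodes of its own nameservice rather than all four.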