HDFS-5876. SecureDataNodeStarter does not pick up configuration in hdfs-site.xml. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1564897 13f79535-47bb-0310-9956-ffa450edef68
Author: Jing Zhao
Date: 2014-02-05 19:43:56 +00:00
parent dcaaeefeaf
commit 69dbf0b225
4 changed files with 14 additions and 6 deletions

View File

@@ -869,6 +869,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-5399. Revisit SafeModeException and corresponding retry policies.
     (Jing Zhao via todd)
 
+    HDFS-5876. SecureDataNodeStarter does not pick up configuration in
+    hdfs-site.xml. (Haohui Mai via jing9)
+
   BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS
 
     HDFS-4985. Add storage type to the protocol and expose it in block report

View File

@@ -362,13 +362,13 @@ private void startInfoServer(Configuration conf) throws IOException {
         .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")));
 
     HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
-    InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
-    String infoHost = infoSocAddr.getHostName();
 
     if (policy.isHttpEnabled()) {
       if (secureResources == null) {
+        InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
         int port = infoSocAddr.getPort();
-        builder.addEndpoint(URI.create("http://" + infoHost + ":" + port));
+        builder.addEndpoint(URI.create("http://"
+            + NetUtils.getHostPortString(infoSocAddr)));
         if (port == 0) {
           builder.setFindPort(true);
         }
@@ -381,7 +381,7 @@ private void startInfoServer(Configuration conf) throws IOException {
     if (policy.isHttpsEnabled()) {
       InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
-          DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 0));
+          DFS_DATANODE_HTTPS_ADDRESS_KEY, DFS_DATANODE_HTTPS_ADDRESS_DEFAULT));
       Configuration sslConf = DFSUtil.loadSslConfiguration(conf);
       DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
@@ -390,7 +390,8 @@ private void startInfoServer(Configuration conf) throws IOException {
       if (port == 0) {
         builder.setFindPort(true);
       }
-      builder.addEndpoint(URI.create("https://" + infoHost + ":" + port));
+      builder.addEndpoint(URI.create("https://"
+          + NetUtils.getHostPortString(secInfoSocAddr)));
     }
 
     this.infoServer = builder.build();

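Note on the startInfoServer change above: the HTTP and HTTPS endpoints are now derived
from the configured socket addresses (DataNode.getInfoAddr(conf) and
dfs.datanode.https.address) rather than from a pre-computed infoHost string, so the
endpoint URI reflects whatever host:port the configuration resolves to. Below is a
minimal sketch of the address-to-URI step this relies on, not the DataNode code itself;
the configuration key and value are only illustrative.

import java.net.InetSocketAddress;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;

// Sketch: how a configured "host:port" string becomes the HttpServer2 endpoint URI.
public class EndpointUriSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Illustrative value; in a real deployment this would come from hdfs-site.xml.
    conf.set("dfs.datanode.http.address", "localhost:50075");

    InetSocketAddress addr = NetUtils.createSocketAddr(
        conf.get("dfs.datanode.http.address", "localhost:50075"));

    // NetUtils.getHostPortString renders the address back as "host:port", so the
    // configured host and port end up in the endpoint unchanged.
    URI endpoint = URI.create("http://" + NetUtils.getHostPortString(addr));
    System.out.println(endpoint); // expected: http://localhost:50075
  }
}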
View File

@@ -25,6 +25,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer2;
@@ -62,7 +63,9 @@ public SecureResources(ServerSocket streamingSocket,
   @Override
   public void init(DaemonContext context) throws Exception {
     System.err.println("Initializing secure datanode resources");
-    Configuration conf = new Configuration();
+    // Create a new HdfsConfiguration object to ensure that the configuration in
+    // hdfs-site.xml is picked up.
+    Configuration conf = new HdfsConfiguration();
 
     // Stash command-line arguments for regular datanode
     args = context.getArguments();

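The crux of the fix is in the hunk above: a bare new Configuration() registers only
core-default.xml and core-site.xml as default resources, so DataNode settings placed in
hdfs-site.xml were invisible to the secure starter; HdfsConfiguration additionally
registers hdfs-default.xml and hdfs-site.xml. A rough sketch of the difference, assuming
an hdfs-site.xml on the classpath that sets dfs.datanode.http.address (the key is only
illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Sketch: why the secure starter needs HdfsConfiguration rather than a bare Configuration.
public class ConfLoadingSketch {
  public static void main(String[] args) {
    Configuration bare = new Configuration();
    // hdfs-site.xml is not a default resource of Configuration, so this call only
    // sees the fallback passed here, not the site value.
    System.out.println(bare.get("dfs.datanode.http.address", "<not loaded>"));

    Configuration hdfs = new HdfsConfiguration();
    // HdfsConfiguration's static initializer adds hdfs-default.xml and hdfs-site.xml
    // as default resources, so the site value is picked up.
    System.out.println(hdfs.get("dfs.datanode.http.address"));
  }
}

Since Configuration.addDefaultResource is global, touching HdfsConfiguration once is
enough for Configuration objects created afterwards in the same JVM to see the HDFS
resources as well; the starter now simply does this explicitly before reading any keys.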
View File

@@ -85,6 +85,7 @@ public static void tearDown() throws Exception {
   @Test
   public void testHttpPolicy() throws Exception {
     conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
     InetSocketAddress addr = InetSocketAddress.createUnresolved("localhost", 0);
     NameNodeHttpServer server = null;
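The test change pins dfs.namenode.https-address to localhost:0 so the HTTPS endpoint
binds an ephemeral port rather than a fixed well-known port, avoiding bind clashes when
testHttpPolicy runs for each HTTP policy value. A small sketch of what port 0 means,
using plain java.net rather than the test's HttpServer2 machinery:

import java.net.InetSocketAddress;
import java.net.ServerSocket;

// Sketch: port 0 asks the OS for any free ephemeral port; the port actually bound
// is read back afterwards, so no two concurrent binds fight over the same port.
public class EphemeralPortSketch {
  public static void main(String[] args) throws Exception {
    try (ServerSocket socket = new ServerSocket()) {
      socket.bind(new InetSocketAddress("localhost", 0));
      System.out.println("bound to port " + socket.getLocalPort());
    }
  }
}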