From cde0e484e7ee99583e6c0b4dd016724f910bd9a7 Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze
Date: Fri, 16 Sep 2011 04:01:51 +0000
Subject: [PATCH] HDFS-2338. Add configuration option to enable/disable webhdfs. Contributed by jitendra

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1171379 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt         |  3 +++
 .../java/org/apache/hadoop/hdfs/DFSConfigKeys.java  |  2 ++
 .../hadoop/hdfs/server/datanode/DataNode.java       | 11 +++++++----
 .../hdfs/server/namenode/NameNodeHttpServer.java    | 14 ++++++++------
 4 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3625c10519..ad57be90a1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -10,6 +10,9 @@ Trunk (unreleased changes)
     HDFS-2317. Support read access to HDFS in webhdfs. (szetszwo)
 
+    HDFS-2338. Add configuration option to enable/disable webhdfs.
+    (jitendra via szetszwo)
+
   IMPROVEMENTS
 
     HADOOP-7524 Change RPC to allow multiple protocols including multuple
     versions of the same protocol (sanjay Radia)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 82fd9fcf5b..c10d185acf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -101,6 +101,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final int DFS_NAMENODE_REPLICATION_PENDING_TIMEOUT_SEC_DEFAULT = -1;
   public static final String DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY = "dfs.namenode.replication.max-streams";
   public static final int DFS_NAMENODE_REPLICATION_MAX_STREAMS_DEFAULT = 2;
+  public static final String DFS_WEBHDFS_ENABLED_KEY = "dfs.webhdfs.enabled";
+  public static final boolean DFS_WEBHDFS_ENABLED_DEFAULT = false;
   public static final String DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
   public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
   public static final String DFS_PERMISSIONS_SUPERUSERGROUP_KEY = "dfs.permissions.superusergroup";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 81cdc85111..73fbe50e53 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -62,6 +62,8 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_FEDERATION_NAMESERVICES;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT;
 import static org.apache.hadoop.hdfs.server.common.Util.now;
 
 import java.io.BufferedOutputStream;
@@ -549,10 +551,11 @@ conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
     this.infoServer.addServlet(null, "/blockScannerReport",
         DataBlockScanner.Servlet.class);
-    infoServer.addJerseyResourcePackage(
-        DatanodeWebHdfsMethods.class.getPackage().getName()
-        + ";" + Param.class.getPackage().getName(),
-        "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    if (conf.getBoolean(DFS_WEBHDFS_ENABLED_KEY, DFS_WEBHDFS_ENABLED_DEFAULT)) {
+      infoServer.addJerseyResourcePackage(DatanodeWebHdfsMethods.class
+          .getPackage().getName() + ";" + Param.class.getPackage().getName(),
+          "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    }
     this.infoServer.start();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index d8a0c50054..f9dce83fa2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -128,7 +128,7 @@ public HttpServer run() throws IOException, InterruptedException {
           nn.getNameNodeAddress());
       httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, nn.getFSImage());
       httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-      setupServlets(httpServer);
+      setupServlets(httpServer, conf);
       httpServer.start();
 
       // The web-server port can be ephemeral... ensure we have the correct
@@ -159,7 +159,7 @@ public InetSocketAddress getHttpAddress() {
     return httpAddress;
   }
 
-  private static void setupServlets(HttpServer httpServer) {
+  private static void setupServlets(HttpServer httpServer, Configuration conf) {
     httpServer.addInternalServlet("getDelegationToken",
         GetDelegationTokenServlet.PATH_SPEC,
         GetDelegationTokenServlet.class, true);
@@ -182,10 +182,12 @@ private static void setupServlets(HttpServer httpServer) {
     httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
         ContentSummaryServlet.class, false);
-    httpServer.addJerseyResourcePackage(
-        NamenodeWebHdfsMethods.class.getPackage().getName()
-            + ";" + Param.class.getPackage().getName(),
-        "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    if (conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
+        DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT)) {
+      httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
+          .getPackage().getName() + ";" + Param.class.getPackage().getName(),
+          "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    }
   }
 
   public static FSImage getFsImageFromContext(ServletContext context) {
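
Usage note: with this patch WebHDFS stays disabled unless dfs.webhdfs.enabled is set to true (typically in hdfs-site.xml), since DFS_WEBHDFS_ENABLED_DEFAULT is false and both HTTP servers now register the WebHDFS Jersey resources only when the flag is on. A minimal sketch, assuming only the stock Configuration/HdfsConfiguration APIs plus the keys added above (the WebHdfsFlagCheck class name is illustrative, not part of the patch), of how the flag is read:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    // Illustrative sketch: reads dfs.webhdfs.enabled the same way the
    // NameNode/DataNode HTTP servers do before registering WebHDFS resources.
    public class WebHdfsFlagCheck {
      public static void main(String[] args) {
        // HdfsConfiguration loads hdfs-default.xml and hdfs-site.xml.
        Configuration conf = new HdfsConfiguration();

        // dfs.webhdfs.enabled defaults to false with this patch; it can also
        // be forced on programmatically instead of via hdfs-site.xml:
        // conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);

        boolean enabled = conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
            DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT);
        System.out.println("WebHDFS enabled: " + enabled);
      }
    }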