diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 6e6c714acc..ed64263971 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -158,6 +158,9 @@ Release 0.23.1 - UNRELEASED
 
     HDFS-2587. Add apt doc for WebHDFS REST API. (szetszwo)
 
+    HDFS-2604. Add a log message to show if WebHDFS is enabled and a
+    configuration section in the forrest doc. (szetszwo)
+
   OPTIMIZATIONS
 
     HDFS-2130. Switch default checksum to CRC32C. (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
index e0c56eaa22..32a26f0c4a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
@@ -138,6 +138,28 @@
 http://<HOST>:<HTTP_PORT>/webhdfs/v1/<PATH>?op=...
+
+<section>
+<title>HDFS Configuration Options</title>
+<p>
+  Below are the HDFS configuration options for WebHDFS.
+</p>
+<table>
+<tr><th>Property Name</th><th>Description</th></tr>
+<tr><td><code>dfs.webhdfs.enabled</code></td>
+<td>Enable/disable WebHDFS in Namenodes and Datanodes</td>
+</tr>
+<tr><td><code>dfs.web.authentication.kerberos.principal</code></td>
+<td>The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
+  The HTTP Kerberos principal MUST start with 'HTTP/' per the Kerberos
+  HTTP SPNEGO specification.</td>
+</tr>
+<tr><td><code>dfs.web.authentication.kerberos.keytab</code></td>
+<td>The Kerberos keytab file with the credentials for the
+  HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.</td>
+</tr>
+</table>
+</section>
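
For illustration, here is a minimal sketch of setting the options documented above through the Configuration API. The property names come from the table; the principal and keytab values are placeholders, not working settings, and a secure setup requires more than what is shown.

    import org.apache.hadoop.conf.Configuration;

    public class EnableWebHdfs {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Enable WebHDFS in the NameNode and DataNode HTTP servers;
        // equivalent to setting dfs.webhdfs.enabled=true in hdfs-site.xml.
        conf.setBoolean("dfs.webhdfs.enabled", true);
        // On a secure cluster the SPNEGO principal and keytab are also
        // required. Placeholder values only:
        conf.set("dfs.web.authentication.kerberos.principal",
            "HTTP/_HOST@EXAMPLE.COM");
        conf.set("dfs.web.authentication.kerberos.keytab",
            "/etc/security/http.keytab");
        System.out.println("dfs.webhdfs.enabled = "
            + conf.getBoolean("dfs.webhdfs.enabled", false));
      }
    }
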
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 65ccba80dc..4651202346 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -35,9 +35,9 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_PLUGINS_KEY;
@@ -50,8 +50,6 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_FEDERATION_NAMESERVICES;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTPS_ENABLE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT;
 
 import java.io.BufferedOutputStream;
 import java.io.ByteArrayInputStream;
@@ -95,6 +93,7 @@
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
 import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -133,7 +132,6 @@
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
-import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
 import org.apache.hadoop.hdfs.server.protocolR23Compatible.InterDatanodeProtocolServerSideTranslatorR23;
 import org.apache.hadoop.hdfs.server.protocolR23Compatible.InterDatanodeProtocolTranslatorR23;
 import org.apache.hadoop.hdfs.server.protocolR23Compatible.InterDatanodeWireProtocol;
@@ -494,7 +492,7 @@
           conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
     this.infoServer.addServlet(null, "/blockScannerReport",
                                DataBlockScanner.Servlet.class);
-    if (conf.getBoolean(DFS_WEBHDFS_ENABLED_KEY, DFS_WEBHDFS_ENABLED_DEFAULT)) {
+    if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
       infoServer.addJerseyResourcePackage(DatanodeWebHdfsMethods.class
           .getPackage().getName() + ";" + Param.class.getPackage().getName(),
           WebHdfsFileSystem.PATH_PREFIX + "/*");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 6f476ec0f3..d8b265913e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -108,8 +108,7 @@ public HttpServer run() throws IOException, InterruptedException {
           infoPort == 0, conf,
           new AccessControlList(conf.get(DFSConfigKeys.DFS_ADMIN, " "))) {
         {
-          if (conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
-              DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT)) {
+          if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
             //add SPNEGO authentication filter for webhdfs
             final String name = "SPNEGO";
             final String classname = AuthFilter.class.getName();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 187bb8c9d3..d10e70888e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -131,6 +131,14 @@ private static synchronized void addRenewAction(final WebHdfsFileSystem webhdfs)
     DT_RENEWER.addRenewAction(webhdfs);
   }
 
+  /** Is WebHDFS enabled in conf? */
+  public static boolean isEnabled(final Configuration conf, final Log log) {
+    final boolean b = conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
+        DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT);
+    log.info(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY + " = " + b);
+    return b;
+  }
+
   private final UserGroupInformation ugi;
   private InetSocketAddress nnAddr;
   private Token delegationToken;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
index b1f9dcec92..cdad31cc9b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
@@ -55,6 +55,7 @@
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.ExceptionHandler;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
+import org.apache.hadoop.hdfs.web.resources.PostOpParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.security.TestDoAsEffectiveUser;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -198,9 +199,9 @@ public String getName() {
       Assert.assertEquals("/user/" + PROXY_USER, responsePath);
     }
 
+    final Path f = new Path("/testWebHdfsDoAs/a.txt");
     { //test create file with doAs
-      final Path f = new Path("/testWebHdfsDoAs/a.txt");
       final PutOpParam.Op op = PutOpParam.Op.CREATE;
       final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
@@ -213,5 +214,21 @@ public String getName() {
       WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
       Assert.assertEquals(PROXY_USER, status.getOwner());
     }
+
+    {
+      //test append file with doAs
+      final PostOpParam.Op op = PostOpParam.Op.APPEND;
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn = WebHdfsTestUtil.twoStepWrite(conn, op);
+      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
+      out.write("\nHello again!".getBytes());
+      out.close();
+
+      final FileStatus status = webhdfs.getFileStatus(f);
+      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
+      WebHdfsTestUtil.LOG.info("status.getLen() =" + status.getLen());
+      Assert.assertEquals(PROXY_USER, status.getOwner());
+    }
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
index 7f6aa36a6e..909ce82c24 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
@@ -33,6 +33,7 @@ static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
     return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
         f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
         f.getPermission(), f.getOwner(), f.getGroup(),
+        f.isSymlink() ? new Path(f.getSymlink()) : null,
         new Path(f.getFullName(parent)));
   }
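
The new test drives APPEND over raw HTTP through the WebHdfsTestUtil helpers. For comparison, a hypothetical client reaches the same server code path through the ordinary FileSystem API. This is a sketch only: it assumes a running cluster with dfs.webhdfs.enabled=true, and the host, port (50070 is the default NameNode HTTP port) and file path are placeholders.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WebHdfsAppendExample {
      public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // A webhdfs:// URI resolves to WebHdfsFileSystem, which talks to the
        // NameNode/DataNode HTTP servers wired up in the patch above.
        final FileSystem fs = FileSystem.get(
            URI.create("webhdfs://namenode:50070"), conf);
        final Path f = new Path("/testWebHdfsDoAs/a.txt");
        final FSDataOutputStream out = fs.append(f);
        try {
          out.write("\nHello again!".getBytes());
        } finally {
          out.close();
        }
        System.out.println("len = " + fs.getFileStatus(f).getLen());
      }
    }
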