HDFS-13081. Datanode#checkSecureConfig should allow SASL and privileged HTTP. Contributed by Ajay Kumar.

Xiaoyu Yao 2018-02-28 09:44:39 -08:00
parent 2574375bf5
commit f20e10b2dd
4 changed files with 71 additions and 28 deletions


@@ -167,7 +167,9 @@ Because the DataNode data transfer protocol does not use the Hadoop RPC framewor
When you execute the `hdfs datanode` command as root, the server process binds privileged ports first, then drops privilege and runs as the user account specified by `HDFS_DATANODE_SECURE_USER`. This startup process uses [the jsvc program](https://commons.apache.org/proper/commons-daemon/jsvc.html "Link to Apache Commons Jsvc") installed to `JSVC_HOME`. You must specify `HDFS_DATANODE_SECURE_USER` and `JSVC_HOME` as environment variables on startup (in `hadoop-env.sh`).
As of version 2.6.0, SASL can be used to authenticate the data transfer protocol. In this configuration, it is no longer required for secured clusters to start the DataNode as root using `jsvc` and bind to privileged ports. To enable SASL on data transfer protocol, set `dfs.data.transfer.protection` in hdfs-site.xml, set a non-privileged port for `dfs.datanode.address`, set `dfs.http.policy` to `HTTPS_ONLY` and make sure the `HDFS_DATANODE_SECURE_USER` environment variable is not defined. Note that it is not possible to use SASL on data transfer protocol if `dfs.datanode.address` is set to a privileged port. This is required for backwards-compatibility reasons.
As of version 2.6.0, SASL can be used to authenticate the data transfer protocol. In this configuration, it is no longer required for secured clusters to start the DataNode as root using `jsvc` and bind to privileged ports. To enable SASL on the data transfer protocol, set `dfs.data.transfer.protection` in hdfs-site.xml. A SASL-enabled DataNode can be started in secure mode in the following two ways:
1. Set a non-privileged port for `dfs.datanode.address`.
1. Set `dfs.http.policy` to `HTTPS_ONLY` or set `dfs.datanode.http.address` to a privileged port, and make sure the `HDFS_DATANODE_SECURE_USER` and `JSVC_HOME` environment variables are specified properly on startup (in `hadoop-env.sh`).
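
To make the first option concrete, here is a minimal sketch that builds such a configuration programmatically, in the same style as the test changes in this commit. The property names are the real HDFS keys named above; the port number and QOP level are illustrative assumptions, not values taken from the commit.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Sketch: a DataNode configuration for option 1 above -- SASL on the data
// transfer protocol, non-privileged ports, HTTPS-only web endpoint -- so the
// DataNode does not need to be started as root via jsvc.
public class SaslDataNodeConfig {
  public static Configuration build() {
    Configuration conf = new HdfsConfiguration();
    // Require SASL protection of block data transfer
    // (accepted levels: authentication, integrity, privacy).
    conf.set("dfs.data.transfer.protection", "privacy");
    // Non-privileged streaming port (> 1023); illustrative value.
    conf.set("dfs.datanode.address", "0.0.0.0:10019");
    // HTTPS only, so the HTTP port does not have to be privileged either.
    conf.set("dfs.http.policy", "HTTPS_ONLY");
    return conf;
  }
}
```

With the change in this commit, the second option also works with SASL enabled: `checkSecureConfig` now accepts privileged (jsvc-bound) resources in combination with SASL data transfer protection, instead of rejecting that combination outright.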
In order to migrate an existing cluster that used root authentication to start using SASL instead, first ensure that version 2.6.0 or later has been deployed to all cluster nodes as well as any external applications that need to connect to the cluster. Only versions 2.6.0 and later of the HDFS client can connect to a DataNode that uses SASL for authentication of data transfer protocol, so it is vital that all callers have the correct version before migrating. After version 2.6.0 or later has been deployed everywhere, update configuration of any external applications to enable SASL. If an HDFS client is enabled for SASL, then it can connect successfully to a DataNode running with either root authentication or SASL authentication. Changing configuration for all clients guarantees that subsequent configuration changes on DataNodes will not disrupt the applications. Finally, each individual DataNode can be migrated by changing its configuration and restarting. It is acceptable to have a mix of some DataNodes running with root authentication and some DataNodes running with SASL authentication temporarily during this migration period, because an HDFS client enabled for SASL can connect to both.
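
On the client side, the migration step "update configuration of any external applications to enable SASL" usually amounts to setting the same `dfs.data.transfer.protection` property in the client's configuration. A minimal sketch, assuming a hypothetical NameNode URI and QOP level:

```java
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch: an HDFS client that requests SASL on the data transfer protocol.
// Such a client can talk to DataNodes running with either root (privileged
// port) authentication or SASL authentication during a rolling migration.
public class SaslHdfsClient {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("dfs.data.transfer.protection", "authentication");
    // The URI below is a placeholder for the cluster's NameNode address.
    try (FileSystem fs = FileSystem.get(new URI("hdfs://nn.example.com:8020"), conf)) {
      fs.listStatus(new Path("/"));
    }
  }
}
```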


@@ -1480,22 +1480,35 @@ private static void checkSecureConfig(DNConf dnConf, Configuration conf,
throw new RuntimeException(errMessage);
}
SaslPropertiesResolver saslPropsResolver = dnConf.getSaslPropsResolver();
if (resources != null && saslPropsResolver == null) {
return;
}
if (dnConf.getIgnoreSecurePortsForTesting()) {
return;
}
if (saslPropsResolver != null &&
DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY &&
resources == null) {
if (resources != null) {
final boolean httpSecured = resources.isHttpPortPrivileged()
|| DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY;
final boolean rpcSecured = resources.isRpcPortPrivileged()
|| resources.isSaslEnabled();
// Allow secure DataNode to startup if:
// 1. Http is secure.
// 2. Rpc is secure
if (rpcSecured && httpSecured) {
return;
}
throw new RuntimeException("Cannot start secure DataNode without " +
"configuring either privileged resources or SASL RPC data transfer " +
"protection and SSL for HTTP. Using privileged resources in " +
"combination with SASL RPC data transfer protection is not supported.");
} else {
// Handle cases when SecureDataNodeStarter#getSecureResources is not
// invoked
SaslPropertiesResolver saslPropsResolver = dnConf.getSaslPropsResolver();
if (saslPropsResolver != null &&
DFSUtil.getHttpPolicy(conf) == HttpConfig.Policy.HTTPS_ONLY) {
return;
}
}
throw new RuntimeException("Cannot start secure DataNode due to incorrect "
+ "config. See https://cwiki.apache.org/confluence/display/HADOOP/"
+ "Secure+DataNode for details.");
}
public static String generateUuid() {


@@ -25,9 +25,9 @@
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
@@ -43,11 +43,21 @@ public class SecureDataNodeStarter implements Daemon {
* Stash necessary resources needed for datanode operation in a secure env.
*/
public static class SecureResources {
private final boolean isSaslEnabled;
private final boolean isRpcPortPrivileged;
private final boolean isHttpPortPrivileged;
private final ServerSocket streamingSocket;
private final ServerSocketChannel httpServerSocket;
public SecureResources(ServerSocket streamingSocket, ServerSocketChannel httpServerSocket) {
public SecureResources(ServerSocket streamingSocket, ServerSocketChannel
httpServerSocket, boolean saslEnabled, boolean rpcPortPrivileged,
boolean httpPortPrivileged) {
this.streamingSocket = streamingSocket;
this.httpServerSocket = httpServerSocket;
this.isSaslEnabled = saslEnabled;
this.isRpcPortPrivileged = rpcPortPrivileged;
this.isHttpPortPrivileged = httpPortPrivileged;
}
public ServerSocket getStreamingSocket() { return streamingSocket; }
@@ -55,6 +65,18 @@ public SecureResources(ServerSocket streamingSocket, ServerSocketChannel httpSer
public ServerSocketChannel getHttpServerChannel() {
return httpServerSocket;
}
public boolean isSaslEnabled() {
return isSaslEnabled;
}
public boolean isRpcPortPrivileged() {
return isRpcPortPrivileged;
}
public boolean isHttpPortPrivileged() {
return isHttpPortPrivileged;
}
}
private String [] args;
@@ -90,8 +112,12 @@ public void start() throws Exception {
public static SecureResources getSecureResources(Configuration conf)
throws Exception {
HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
boolean isSecure = UserGroupInformation.isSecurityEnabled();
boolean isSaslEnabled =
DataTransferSaslUtil.getSaslPropertiesResolver(conf) != null;
boolean isRpcPrivileged;
boolean isHttpPrivileged = false;
System.err.println("isSaslEnabled:" + isSaslEnabled);
// Obtain secure port for data streaming to datanode
InetSocketAddress streamingAddr = DataNode.getStreamingAddr(conf);
int socketWriteTimeout = conf.getInt(
@@ -118,12 +144,7 @@ public static SecureResources getSecureResources(Configuration conf)
+ "context. Needed " + streamingAddr.getPort() + ", got "
+ ss.getLocalPort());
}
if (!SecurityUtil.isPrivilegedPort(ss.getLocalPort()) && isSecure) {
throw new RuntimeException(
"Cannot start secure datanode with unprivileged RPC ports");
}
isRpcPrivileged = SecurityUtil.isPrivilegedPort(ss.getLocalPort());
System.err.println("Opened streaming server at " + streamingAddr);
// Bind a port for the web server. The code intends to bind HTTP server to
@@ -151,16 +172,14 @@ public static SecureResources getSecureResources(Configuration conf)
System.err.println("Successfully obtained privileged resources (streaming port = "
+ ss + " ) (http listener port = " + localAddr.getPort() +")");
if (localAddr.getPort() > 1023 && isSecure) {
throw new RuntimeException(
"Cannot start secure datanode with unprivileged HTTP ports");
}
isHttpPrivileged = SecurityUtil.isPrivilegedPort(localAddr.getPort());
System.err.println("Opened info server at " + infoSocAddr);
} else {
httpChannel = null;
}
return new SecureResources(ss, httpChannel);
return new SecureResources(ss, httpChannel, isSaslEnabled,
isRpcPrivileged, isHttpPrivileged);
}
private static BindException appendMessageToBindException(BindException e,


@@ -50,6 +50,7 @@
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.test.GenericTestUtils;
@@ -173,6 +174,14 @@ public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
startCluster(clusterConf);
}
@Test
public void testDataNodeStartIfHttpsQopPrivacy() throws Exception {
HdfsConfiguration clusterConf = createSecureConfig("privacy");
clusterConf.set(DFS_HTTP_POLICY_KEY,
Policy.HTTPS_ONLY.name());
startCluster(clusterConf);
}
@Test
public void testNoSaslAndSecurePortsIgnored() throws Exception {
HdfsConfiguration clusterConf = createSecureConfig("");