diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index 0fd400e71b..89490bca98 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -19,6 +19,8 @@
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -44,6 +46,9 @@
  * sequence.
  */
 public class KerberosAuthenticator implements Authenticator {
+
+  private static Logger LOG = LoggerFactory.getLogger(
+      KerberosAuthenticator.class);
 
   /**
    * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
@@ -152,9 +157,18 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
       }
       conn.setRequestMethod(AUTH_HTTP_METHOD);
       conn.connect();
-      if (isNegotiate()) {
+
+      if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+        LOG.debug("JDK performed authentication on our behalf.");
+        // If the JDK already did the SPNEGO back-and-forth for
+        // us, just pull out the token.
+        AuthenticatedURL.extractToken(conn, token);
+        return;
+      } else if (isNegotiate()) {
+        LOG.debug("Performing our own SPNEGO sequence.");
         doSpnegoSequence(token);
       } else {
+        LOG.debug("Using fallback authenticator sequence.");
         getFallBackAuthenticator().authenticate(url, token);
       }
     }
@@ -168,7 +182,11 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
    * @return the fallback {@link Authenticator}.
    */
   protected Authenticator getFallBackAuthenticator() {
-    return new PseudoAuthenticator();
+    Authenticator auth = new PseudoAuthenticator();
+    if (connConfigurator != null) {
+      auth.setConnectionConfigurator(connConfigurator);
+    }
+    return auth;
   }
 
   /*
@@ -197,11 +215,16 @@ private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException,
       AccessControlContext context = AccessController.getContext();
       Subject subject = Subject.getSubject(context);
       if (subject == null) {
+        LOG.debug("No subject in context, logging in");
         subject = new Subject();
         LoginContext login = new LoginContext("", subject,
             null, new KerberosConfiguration());
         login.login();
       }
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using subject: " + subject);
+      }
       Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
 
         @Override
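The branch ordering above matters: an HTTP 200 means the JDK's `HttpURLConnection` already completed the SPNEGO exchange using the Subject on the current access control context, so the authenticator only needs to extract the signed token; a 401 carrying a `WWW-Authenticate: Negotiate` header (the `isNegotiate()` check) triggers the hand-rolled sequence; anything else goes to the fallback. Below is a minimal sketch of how a caller drives this path; it is an illustration, not part of the patch, and the class name and URL are hypothetical:

```java
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;

public class SpnegoClientSketch {
  public static void main(String[] args) throws Exception {
    // The token is populated by whichever branch authenticate() takes:
    // the JDK-completed SPNEGO exchange (HTTP 200), the hand-rolled
    // sequence, or the fallback authenticator.
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    URL url = new URL("http://host.example.com:50070/getimage");  // hypothetical endpoint
    HttpURLConnection conn =
        new AuthenticatedURL(new KerberosAuthenticator()).openConnection(url, token);
    System.out.println("HTTP " + conn.getResponseCode() + " token=" + token);
  }
}
```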
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 14289ad1a7..87b51b4a1c 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -579,6 +579,8 @@ Release 2.0.2-alpha - 2012-09-07
     HADOOP-8781. hadoop-config.sh should add JAVA_LIBRARY_PATH to
     LD_LIBRARY_PATH. (tucu)
 
+    HADOOP-8855. SSL-based image transfer does not work when Kerberos is disabled. (todd via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HADOOP-8220. ZKFailoverController doesn't handle failure to become active
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 0815c6790d..66ffe20c6b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -499,7 +499,7 @@ private static <T> T doAsUser(UserGroupInformation ugi,
    * @throws IOException If unable to authenticate via SPNEGO
    */
   public static URLConnection openSecureHttpConnection(URL url) throws IOException {
-    if(!UserGroupInformation.isSecurityEnabled()) {
+    if (!HttpConfig.isSecure() && !UserGroupInformation.isSecurityEnabled()) {
       return url.openConnection();
     }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index 421f7bc3fd..86e7d77e63 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -20,6 +20,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -53,6 +54,7 @@
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.util.StringUtils;
@@ -511,11 +513,17 @@ public int setBalancerBandwidth(String[] argv, int idx) throws IOException {
    * @return an exit code indicating success or failure.
    * @throws IOException
    */
-  public int fetchImage(String[] argv, int idx) throws IOException {
-    String infoServer = DFSUtil.getInfoServer(
+  public int fetchImage(final String[] argv, final int idx) throws IOException {
+    final String infoServer = DFSUtil.getInfoServer(
         HAUtil.getAddressOfActive(getDFS()), getConf(), false);
-    TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
-        new File(argv[idx]));
+    SecurityUtil.doAsCurrentUser(new PrivilegedExceptionAction<Void>() {
+      @Override
+      public Void run() throws Exception {
+        TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
+            new File(argv[idx]));
+        return null;
+      }
+    });
     return 0;
   }
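The `doAsCurrentUser` wrapper is what ties the three changes together: `fetchImage` now runs the transfer inside the CLI user's login context, so when `SecurityUtil.openSecureHttpConnection` (or `KerberosAuthenticator.doSpnegoSequence()` underneath it) calls `Subject.getSubject(AccessController.getContext())`, it finds the user's Kerberos credentials instead of a null Subject. A minimal sketch of the same pattern in isolation, assuming a Hadoop client classpath (the class name is hypothetical):

```java
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsSketch {
  public static void main(String[] args) throws Exception {
    // doAsCurrentUser() runs the action inside Subject.doAs() for the
    // current UGI, so any code in the action that inspects the
    // AccessControlContext sees the caller's Subject.
    String user = SecurityUtil.doAsCurrentUser(
        new PrivilegedExceptionAction<String>() {
          @Override
          public String run() throws Exception {
            return UserGroupInformation.getCurrentUser().getUserName();
          }
        });
    System.out.println("Action ran as: " + user);
  }
}
```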