diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index 1594ffe0ea..3bb807341a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -33,6 +33,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1325,7 +1326,14 @@ Collection<Token<? extends TokenIdentifier>> getTokens() {
    * @return Credentials of tokens associated with this user
    */
   public synchronized Credentials getCredentials() {
-    return new Credentials(getCredentialsInternal());
+    Credentials creds = new Credentials(getCredentialsInternal());
+    Iterator<Token<?>> iter = creds.getAllTokens().iterator();
+    while (iter.hasNext()) {
+      if (iter.next() instanceof Token.PrivateToken) {
+        iter.remove();
+      }
+    }
+    return creds;
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
index 905c948da7..14d81910b5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
@@ -19,31 +19,20 @@
 package org.apache.hadoop.security.token;
 
 import com.google.common.collect.Maps;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.ServiceLoader;
-
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import java.io.*;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.ServiceLoader;
+
 /**
  * The client-side form of the token.
  */
@@ -195,6 +184,19 @@ public void setService(Text newService) {
     service = newService;
   }
 
+  /**
+   * Indicates that the token is a clone.  Used by the HA failover proxy
+   * to indicate that a token should not be visible to the user via
+   * UGI.getCredentials().
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static class PrivateToken<T extends TokenIdentifier> extends Token<T> {
+    public PrivateToken(Token<T> token) {
+      super(token);
+    }
+  }
+
   @Override
   public void readFields(DataInput in) throws IOException {
     int len = WritableUtils.readVInt(in);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
index aa40cf48bd..baa95b14fc 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
@@ -16,11 +16,21 @@
  */
 package org.apache.hadoop.security;
 
-import static org.junit.Assert.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.TestSaslRPC;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.Shell;
 import org.junit.*;
-import static org.mockito.Mockito.*;
-
 
+import javax.security.auth.Subject;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.LoginContext;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -30,21 +40,13 @@
 import java.util.LinkedHashSet;
 import java.util.Set;
 
-import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.LoginContext;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.metrics2.MetricsRecordBuilder;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import static org.apache.hadoop.test.MetricsAsserts.*;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
-import org.apache.hadoop.util.Shell;
+import static org.apache.hadoop.ipc.TestSaslRPC.*;
+import static org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
+import static org.apache.hadoop.test.MetricsAsserts.*;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 public class TestUserGroupInformation {
   final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
@@ -786,4 +788,29 @@ public void testSetLoginUser() throws IOException {
     UserGroupInformation.setLoginUser(ugi);
     assertEquals(ugi, UserGroupInformation.getLoginUser());
   }
+
+  /**
+   * In some scenarios, such as HA, delegation tokens are associated with a
+   * logical name. The tokens are cloned and are associated with the
+   * physical address of the server where the service is provided.
+   * This test ensures cloned delegation tokens are used locally
+   * and are not returned in {@link UserGroupInformation#getCredentials()}.
+   */
+  @Test
+  public void testPrivateTokenExclusion() throws Exception {
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    TestTokenIdentifier tokenId = new TestTokenIdentifier();
+    Token<TestTokenIdentifier> token = new Token<TestTokenIdentifier>(
+        tokenId.getBytes(), "password".getBytes(),
+        tokenId.getKind(), null);
+    ugi.addToken(new Text("regular-token"), token);
+
+    // Now add cloned private token
+    ugi.addToken(new Text("private-token"), new Token.PrivateToken<TestTokenIdentifier>(token));
+    ugi.addToken(new Text("private-token1"), new Token.PrivateToken<TestTokenIdentifier>(token));
+
+    // Ensure only non-private tokens are returned
+    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
+    assertEquals(1, tokens.size());
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java
index 9674b6d6f7..7d53fb991d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java
@@ -17,15 +17,9 @@
  */
 package org.apache.hadoop.hdfs;
 
-import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-
+import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -41,11 +35,17 @@
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import static org.apache.hadoop.hdfs.protocol.HdfsConstants.HA_DT_SERVICE_PREFIX;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Map;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
+import static org.apache.hadoop.hdfs.protocol.HdfsConstants.HA_DT_SERVICE_PREFIX;
 
 public class HAUtil {
 
@@ -265,10 +265,15 @@ public static void cloneDelegationTokenForLogicalUri(
         tokenSelector.selectToken(haService, ugi.getTokens());
     if (haToken != null) {
       for (InetSocketAddress singleNNAddr : nnAddrs) {
+        // this is a minor hack to prevent physical HA tokens from being
+        // exposed to the user via UGI.getCredentials(); otherwise these
+        // cloned tokens may be inadvertently propagated to jobs
         Token<DelegationTokenIdentifier> specificToken =
-            new Token<DelegationTokenIdentifier>(haToken);
+            new Token.PrivateToken<DelegationTokenIdentifier>(haToken);
         SecurityUtil.setTokenService(specificToken, singleNNAddr);
-        ugi.addToken(specificToken);
+        Text alias =
+            new Text(HA_DT_SERVICE_PREFIX + "//" + specificToken.getService());
+        ugi.addToken(alias, specificToken);
         LOG.debug("Mapped HA service delegation token for logical URI " +
             haUri + " to namenode " + singleNNAddr);
       }
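Note (illustrative, not part of the patch): the standalone sketch below shows the client-visible behavior the change introduces, assuming the patch above is applied. It mirrors testPrivateTokenExclusion: tokens wrapped as Token.PrivateToken remain available for RPC token selection through UGI.getTokens(), but are filtered out of UGI.getCredentials(). The class name PrivateTokenSketch, the token kind "SOME_KIND", and the service/alias strings are made-up placeholders; only existing public Hadoop APIs (UserGroupInformation, Credentials, Token, Text) are used.

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class PrivateTokenSketch {
  public static void main(String[] args) throws IOException {
    // Assumes the current UGI starts with no tokens; counts below are relative.
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    // A delegation token keyed by the logical (HA) service name.
    // Identifier/password bytes and the kind are placeholders.
    Token<TokenIdentifier> logicalToken = new Token<TokenIdentifier>(
        new byte[0], new byte[0], new Text("SOME_KIND"), new Text("ha-hdfs:mycluster"));
    ugi.addToken(new Text("ha-hdfs:mycluster"), logicalToken);

    // A per-NameNode clone wrapped as a PrivateToken, mirroring what
    // HAUtil.cloneDelegationTokenForLogicalUri now does for each NN address.
    Token<TokenIdentifier> clonedToken =
        new Token.PrivateToken<TokenIdentifier>(logicalToken);
    clonedToken.setService(new Text("nn1.example.com:8020"));
    ugi.addToken(new Text("ha-hdfs://nn1.example.com:8020"), clonedToken);

    // Both tokens remain visible for RPC token selection ...
    System.out.println("tokens in UGI: " + ugi.getTokens().size());              // 2
    // ... but only the non-private token is propagated via getCredentials().
    Credentials creds = ugi.getCredentials();
    System.out.println("tokens in credentials: " + creds.getAllTokens().size()); // 1
  }
}

This is why HAUtil can keep the per-NameNode clones inside the UGI for failover while, per the comment in the patch, preventing the cloned tokens from being inadvertently propagated to jobs through getCredentials().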