HDFS-3654. TestJspHelper#testGetUgi fails with NPE. Contributed by Eli Collins
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1361463 13f79535-47bb-0310-9956-ffa450edef68
parent f5dd4583bb
commit 32d4c148df
@@ -488,6 +488,8 @@ Branch-2 ( Unreleased changes )
     HDFS-3609. libhdfs: don't force the URI to look like hdfs://hostname:port.
     (Colin Patrick McCabe via eli)
 
+    HDFS-3654. TestJspHelper#testGetUgi fails with NPE. (eli)
+
 BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HDFS-2185. HDFS portion of ZK-based FailoverController (todd)
@@ -556,7 +556,7 @@ public static UserGroupInformation getUGI(ServletContext context,
         DelegationTokenIdentifier id = new DelegationTokenIdentifier();
         id.readFields(in);
         final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
-        nn.getNamesystem().verifyToken(id, token.getPassword());
+        nn.verifyToken(id, token.getPassword());
         ugi = id.getUser();
         if (ugi.getRealUser() == null) {
           //non-proxy case
@@ -5460,20 +5460,10 @@ public BlockManager getBlockManager() {
     return blockManager;
   }
 
-  /**
-   * Verifies that the given identifier and password are valid and match.
-   * @param identifier Token identifier.
-   * @param password Password in the token.
-   * @throws InvalidToken
-   */
-  public synchronized void verifyToken(DelegationTokenIdentifier identifier,
-      byte[] password) throws InvalidToken {
-    getDelegationTokenSecretManager().verifyToken(identifier, password);
-  }
-
   public boolean isGenStampInFuture(long genStamp) {
     return (genStamp > getGenerationStamp());
   }
 
   @VisibleForTesting
   public EditLogTailer getEditLogTailer() {
     return editLogTailer;
@@ -51,6 +51,7 @@
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
@@ -78,6 +79,7 @@
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
 import org.apache.hadoop.util.ServicePlugin;
 import org.apache.hadoop.util.StringUtils;
@@ -1284,6 +1286,17 @@ private synchronized void doImmediateShutdown(Throwable t)
     terminate(1, t.getMessage());
   }
 
+  /**
+   * Verifies that the given identifier and password are valid and match.
+   * @param identifier Token identifier.
+   * @param password Password in the token.
+   * @throws InvalidToken
+   */
+  public synchronized void verifyToken(DelegationTokenIdentifier identifier,
+      byte[] password) throws InvalidToken {
+    namesystem.getDelegationTokenSecretManager().verifyToken(identifier, password);
+  }
+
   /**
    * Class used to expose {@link NameNode} as context to {@link HAState}
    */
@@ -63,7 +63,7 @@ public class NameNodeHttpServer {
 
   public static final String NAMENODE_ADDRESS_ATTRIBUTE_KEY = "name.node.address";
   public static final String FSIMAGE_ATTRIBUTE_KEY = "name.system.image";
-  protected static final String NAMENODE_ATTRIBUTE_KEY = "name.node";
+  public static final String NAMENODE_ATTRIBUTE_KEY = "name.node";
 
   public NameNodeHttpServer(
       Configuration conf,
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -69,6 +70,7 @@ public void testGetUgi() throws IOException {
     conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
     HttpServletRequest request = mock(HttpServletRequest.class);
     ServletContext context = mock(ServletContext.class);
+    NameNode nn = mock(NameNode.class);
     String user = "TheDoctor";
     Text userText = new Text(user);
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText,
@@ -79,6 +81,8 @@ public void testGetUgi() throws IOException {
     when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     when(request.getRemoteUser()).thenReturn(user);
+    when(context.getAttribute(
+        NameNodeHttpServer.NAMENODE_ATTRIBUTE_KEY)).thenReturn(nn);
 
     //Test attribute in the url to be used as service in the token.
     when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(