HDFS-3255. HA DFS returns wrong token service. Contributed by Daryn Sharp.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1325414 13f79535-47bb-0310-9956-ffa450edef68
parent f01ede227f
commit 72406e940a
@@ -501,6 +501,8 @@ Release 2.0.0 - UNRELEASED
     HDFS-3260. TestDatanodeRegistration should set minimum DN version in
     addition to minimum NN version. (atm)
 
+    HDFS-3255. HA DFS returns wrong token service (Daryn Sharp via todd)
+
   BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)
@@ -848,8 +848,9 @@ public void setBalancerBandwidth(long bandwidth) throws IOException {
    */
   @Override
   public String getCanonicalServiceName() {
-    if (HAUtil.isLogicalUri(getConf(), getUri())) {
-      return getUri().getHost();
+    URI uri = getUri();
+    if (HAUtil.isLogicalUri(getConf(), uri)) {
+      return HAUtil.buildTokenServiceForLogicalUri(uri).toString();
     } else {
       return super.getCanonicalServiceName();
     }
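For context, a minimal client-side sketch of what the change above means in practice. It assumes an HA nameservice with the logical name "mycluster" has been configured (dfs.nameservices, dfs.ha.namenodes.mycluster, and so on); the class name, nameservice name, and renewer below are illustrative and not part of this patch.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.token.Token;

public class HaTokenServiceSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // "mycluster" is a hypothetical logical HA URI, not a physical namenode host
    FileSystem fs = FileSystem.get(URI.create("hdfs://mycluster"), conf);

    // Before the fix this returned only the host ("mycluster"); with the fix it
    // returns the HA token service built by HAUtil.buildTokenServiceForLogicalUri,
    // e.g. "ha-hdfs:mycluster", so it matches the service on issued tokens.
    String service = fs.getCanonicalServiceName();

    // The delegation token's service should now equal the canonical service
    // name, which is what token selection keys on.
    Token<?> token = fs.getDelegationToken("someRenewer");
    System.out.println(service + " == " + token.getService());
  }
}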
@@ -42,6 +42,7 @@
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -196,8 +197,7 @@ public void testHAUtilClonesDelegationTokens() throws Exception {
     // check that the token selected for one of the physical IPC addresses
     // matches the one we received
     InetSocketAddress addr = nn0.getNameNodeAddress();
-    Text ipcDtService = new Text(
-        addr.getAddress().getHostAddress() + ":" + addr.getPort());
+    Text ipcDtService = SecurityUtil.buildTokenService(addr);
     Token<DelegationTokenIdentifier> token2 =
         DelegationTokenSelector.selectHdfsDelegationToken(ipcDtService, ugi);
     assertNotNull(token2);
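The test change above swaps a hand-built "ip:port" string for SecurityUtil.buildTokenService(addr), which derives the token service from the socket address according to the client's security configuration (for example, whether services are keyed by IP or by hostname). A small illustrative sketch, using a stand-in local address rather than anything from this patch:

import java.net.InetSocketAddress;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.SecurityUtil;

public class TokenServiceSketch {
  public static void main(String[] args) {
    // stand-in for a namenode RPC address; not taken from this patch
    InetSocketAddress addr = new InetSocketAddress("localhost", 8020);
    // builds the canonical token service for that address, e.g. "127.0.0.1:8020"
    // (or a hostname form, depending on configuration), instead of the test's
    // previous manual getHostAddress() + ":" + getPort() concatenation
    Text service = SecurityUtil.buildTokenService(addr);
    System.out.println(service);
  }
}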
@@ -212,8 +212,15 @@ public void testHAUtilClonesDelegationTokens() throws Exception {
    */
   @Test
   public void testDFSGetCanonicalServiceName() throws Exception {
-    assertEquals(fs.getCanonicalServiceName(),
-        HATestUtil.getLogicalUri(cluster).getHost());
+    URI hAUri = HATestUtil.getLogicalUri(cluster);
+    String haService = HAUtil.buildTokenServiceForLogicalUri(hAUri).toString();
+    assertEquals(haService, dfs.getCanonicalServiceName());
+    Token<?> token = dfs.getDelegationToken(
+        UserGroupInformation.getCurrentUser().getShortUserName());
+    assertEquals(haService, token.getService().toString());
+    // make sure the logical uri is handled correctly
+    token.renew(dfs.getConf());
+    token.cancel(dfs.getConf());
   }
 
   enum TokenTestAction {