MAPREDUCE-5275. Bring back a couple of APIs in mapreduce.security.TokenCache for binary compatibility with 1.x mapreduce APIs. Contributed by Mayank Bansal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1488369 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Vinod Kumar Vavilapalli 2013-05-31 19:48:03 +00:00
parent 781e82ca9a
commit 73b88956f5
3 changed files with 53 additions and 0 deletions

View File

@ -252,6 +252,9 @@ Release 2.0.5-beta - UNRELEASED
MAPREDUCE-5176. Add annotation for tagging tasks as responsive to
preemption. (Carlo Curino, cdouglas)
MAPREDUCE-5275. Bring back a couple of APIs in mapreduce.security.TokenCache
for binary compatibility with 1.x mapreduce APIs. (Mayank Bansal via vinodkv)
OPTIMIZATIONS

MAPREDUCE-4974. Optimising the LineRecordReader initialize() method

View File

@ -177,6 +177,19 @@ public static Credentials loadTokens(String jobTokenFile, JobConf conf)
}
return ts;
}
/**
 * Load the job token from the given token storage file.
 * <p>
 * Kept for binary compatibility with the 1.x mapreduce APIs
 * (MAPREDUCE-5275); delegates to the {@link JobConf} overload.
 *
 * @param jobTokenFile path of the file containing the job token
 * @param conf configuration used to open and read the token file
 * @return the {@link Credentials} read from the token file
 * @throws IOException if the token file cannot be opened or read
 */
@InterfaceAudience.Private
public static Credentials loadTokens(String jobTokenFile, Configuration conf)
    throws IOException {
  return loadTokens(jobTokenFile, new JobConf(conf));
}
/**
 * store job token
 * @param t
@ -205,4 +218,16 @@ public static void setShuffleSecretKey(byte[] key, Credentials credentials) {
public static byte[] getShuffleSecretKey(Credentials credentials) {
return getSecretKey(credentials, SHUFFLE_TOKEN);
}
/**
 * Look up the delegation token stored in the given credentials under the
 * given namenode's service name.
 * <p>
 * Kept for binary compatibility with the 1.x mapreduce APIs
 * (MAPREDUCE-5275).
 *
 * @param credentials the credentials to search
 * @param namenode the service name the token was stored under
 * @return the delegation token for the namenode, or null if absent
 */
@InterfaceAudience.Private
public static Token<?> getDelegationToken(
    Credentials credentials, String namenode) {
  // Credentials.getToken already returns Token<? extends TokenIdentifier>,
  // which is assignable to Token<?> — the original explicit cast was
  // redundant and has been dropped.
  return credentials.getToken(new Text(namenode));
}
}

View File

@ -24,6 +24,7 @@
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -168,4 +169,28 @@ public void testCleanUpTokenReferral() throws Exception {
TokenCache.cleanUpTokenReferral(conf);
assertNull(conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY));
}
@Test
public void testGetTokensForNamenodes() throws IOException,
    URISyntaxException {
  Path testRootDir =
      new Path(System.getProperty("test.build.data", "test/build/data"));
  // Need the fully-qualified path without the leading file:/ scheme.
  String binaryTokenFile =
      FileSystem.getLocal(conf)
          .makeQualified(new Path(testRootDir, "tokenFile")).toUri()
          .getPath();

  // Mint a delegation token from a mock filesystem and stash it in the
  // credentials that will be persisted to the binary token file.
  MockFileSystem fs1 = createFileSystemForServiceName("service1");
  Credentials creds = new Credentials();
  Token<?> token1 = fs1.getDelegationToken(renewer);
  creds.addToken(token1.getService(), token1);

  // Only point the config at the token file right before writing it,
  // otherwise the obtain-tokens call would hit FileNotFoundException.
  conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, binaryTokenFile);
  creds.writeTokenStorageFile(new Path(binaryTokenFile), conf);

  TokenCache.obtainTokensForNamenodesInternal(fs1, creds, conf);

  // The token must be retrievable by the filesystem's service name.
  String serviceAddr = fs1.getCanonicalServiceName();
  Token<?> nnToken = TokenCache.getDelegationToken(creds, serviceAddr);
  assertNotNull("Token for nn is null", nnToken);
}
}