HADOOP-9374. Add tokens from -tokenCacheFile into UGI (daryn)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1454019 13f79535-47bb-0310-9956-ffa450edef68
Daryn Sharp 2013-03-07 19:22:44 +00:00
parent 97c4668312
commit 6d4ab86412
3 changed files with 30 additions and 7 deletions
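
In essence, the patch teaches GenericOptionsParser to load the credentials stored in the file passed via -tokenCacheFile into the current user's UGI, in addition to recording the (now fully qualified) file name in the configuration. A minimal standalone sketch of that behavior, assuming a token storage file previously written with Credentials.writeTokenStorageFile; the class name TokenCacheLoadSketch and the argument handling are illustrative, not part of the patch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class TokenCacheLoadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem localFs = FileSystem.getLocal(conf);
    // Qualify the user-supplied path against the local file system, as the
    // patched parser does, so the same string works for both the existence
    // check and the configuration entry.
    Path tokenFile = localFs.makeQualified(new Path(args[0]));
    // Deserialize the tokens and merge them into the current user's UGI so
    // that RPC clients can pick them up without a separate submission step.
    Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
    UserGroupInformation.getCurrentUser().addCredentials(creds);
  }
}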

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -1529,6 +1529,8 @@ Release 0.23.7 - UNRELEASED
     HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via
     jeagles)
 
+    HADOOP-9374. Add tokens from -tokenCacheFile into UGI (daryn)
+
   OPTIMIZATIONS
 
     HADOOP-8462. Native-code implementation of bzip2 codec. (Govind Kamat via

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java

@@ -42,6 +42,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * <code>GenericOptionsParser</code> is a utility to parse command line
@@ -321,15 +323,17 @@ private void processGeneralOptions(Configuration conf,
       String fileName = line.getOptionValue("tokenCacheFile");
       // check if the local file exists
       FileSystem localFs = FileSystem.getLocal(conf);
-      Path p = new Path(fileName);
+      Path p = localFs.makeQualified(new Path(fileName));
       if (!localFs.exists(p)) {
         throw new FileNotFoundException("File "+fileName+" does not exist.");
       }
       if(LOG.isDebugEnabled()) {
         LOG.debug("setting conf tokensFile: " + fileName);
       }
-      conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
-          .toString(), "from -tokenCacheFile command line option");
+      UserGroupInformation.getCurrentUser().addCredentials(
+          Credentials.readTokenStorageFile(p, conf));
+      conf.set("mapreduce.job.credentials.json", p.toString(),
+          "from -tokenCacheFile command line option");
     }
   }
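
With the parser change above, any driver that goes through GenericOptionsParser (directly or via ToolRunner) has the tokens from -tokenCacheFile available in its UGI as soon as parsing completes, alongside the mapreduce.job.credentials.json entry. A hedged sketch of such a driver; DriverSketch and the printouts are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.GenericOptionsParser;

public class DriverSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Parsing consumes "-tokenCacheFile <path>"; with this patch the tokens
    // in that file are merged into the current UGI as a side effect.
    String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();
    Credentials creds = UserGroupInformation.getCurrentUser().getCredentials();
    System.out.println("application args left after parsing: " + remaining.length);
    System.out.println("tokens now in UGI: " + creds.numberOfTokens());
    System.out.println("credentials file recorded as: "
        + conf.get("mapreduce.job.credentials.json"));
  }
}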

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java

@@ -27,6 +27,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -164,13 +169,25 @@ public void testTokenCacheOption() throws IOException {
         th instanceof FileNotFoundException);
     // create file
-    Path tmpPath = new Path(tmpFile.toString());
-    localFs.create(tmpPath);
+    Path tmpPath = localFs.makeQualified(new Path(tmpFile.toString()));
+    Token<?> token = new Token<AbstractDelegationTokenIdentifier>(
+        "identifier".getBytes(), "password".getBytes(),
+        new Text("token-kind"), new Text("token-service"));
+    Credentials creds = new Credentials();
+    creds.addToken(new Text("token-alias"), token);
+    creds.writeTokenStorageFile(tmpPath, conf);
     new GenericOptionsParser(conf, args);
     String fileName = conf.get("mapreduce.job.credentials.json");
     assertNotNull("files is null", fileName);
-    assertEquals("files option does not match",
-        localFs.makeQualified(tmpPath).toString(), fileName);
+    assertEquals("files option does not match", tmpPath.toString(), fileName);
+    Credentials ugiCreds =
+        UserGroupInformation.getCurrentUser().getCredentials();
+    assertEquals(1, ugiCreds.numberOfTokens());
+    Token<?> ugiToken = ugiCreds.getToken(new Text("token-alias"));
+    assertNotNull(ugiToken);
+    assertEquals(token, ugiToken);
     localFs.delete(new Path(testDir.getAbsolutePath()), true);
   }