MAPREDUCE-3149. Add a test to verify that TokenCache handles file system uri with no authority. Contributed by John George.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1186516 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jitendra Nath Pandey 2011-10-19 21:52:50 +00:00
parent 0bd8f0bd40
commit ba66ca6856
2 changed files with 29 additions and 0 deletions

View File

@@ -25,6 +25,9 @@ Trunk (unreleased changes)
MAPREDUCE-3171. normalize nodemanager native code compilation with common/hdfs
native. (tucu)
MAPREDUCE-3149. Add a test to verify that TokenCache handles file system
uri with no authority. (John George via jitendra)
BUG FIXES
MAPREDUCE-3166. [Rumen] Make Rumen use job history api instead of relying

View File

@@ -63,6 +63,7 @@
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.tools.HadoopArchives;
import org.apache.hadoop.util.ToolRunner;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.AfterClass;
@@ -428,4 +429,29 @@ public void testGetTokensForViewFS() throws IOException, URISyntaxException {
assertTrue("didn't find token for [" + lp1 + ", " + lp2 + "]", found);
}
}
/**
 * Verifies that {@code TokenCache.obtainTokensForNamenodesInternal} handles a
 * file system URI with no authority component (a har archive path resolved
 * against the test cluster's home directory).
 *
 * @throws IOException if token acquisition fails
 */
@Test
public void testGetTokensForUriWithoutAuth() throws IOException {
  FileSystem fs = dfsCluster.getFileSystem();
  HadoopArchives har = new HadoopArchives(jConf);
  Path archivePath = new Path(fs.getHomeDirectory(), "tmp");
  // Build: hadoop archive -archiveName foo1.har -p <home> test <archivePath>
  String[] args = new String[] {
    "-archiveName",
    "foo1.har",
    "-p",
    fs.getHomeDirectory().toString(),
    "test",
    archivePath.toString()
  };
  int ret = -1;
  try {
    ret = ToolRunner.run(har, args);
  } catch (Exception e) {
    // Preserve the cause in the failure message instead of dropping it.
    fail("Could not create har file: " + e);
  }
  // The original code ignored the tool's exit status; a non-zero return
  // means archive creation failed even though no exception was thrown.
  assertTrue("har creation returned non-zero exit code: " + ret, ret == 0);
  Path finalPath = new Path(archivePath, "foo1.har");
  // Should not throw even though the har URI carries no authority.
  TokenCache.obtainTokensForNamenodesInternal(
      new Credentials(), new Path[] {finalPath}, jConf);
}
}