HADOOP-7810. move hadoop archive to core from tools. (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1213907 13f79535-47bb-0310-9956-ffa450edef68
commit 0201be46c2
parent f2f4e93413
Author: Alejandro Abdelnur
Date:   2011-12-13 20:17:59 +00:00

6 changed files with 19 additions and 0 deletions

CHANGES.txt

@@ -136,6 +136,8 @@ Trunk (unreleased changes)
     HADOOP-7913 Fix bug in ProtoBufRpcEngine (sanjay)
+    HADOOP-7810. move hadoop archive to core from tools. (tucu)
   OPTIMIZATIONS
     HADOOP-7761. Improve the performance of raw comparisons. (todd)

HadoopArchives.java

@@ -111,6 +111,14 @@ public void setConf(Configuration conf) {
     } else {
       this.conf = new JobConf(conf, HadoopArchives.class);
     }
+
+    // For test purposes only: under MR2, unlike Streaming, it is not
+    // possible here to add a JAR to the classpath that the tool will use
+    // when running the MapReduce job, so let tests supply one via a
+    // system property.
+    String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
+    if (testJar != null) {
+      ((JobConf)conf).setJar(testJar);
+    }
   }

   public Configuration getConf() {
@@ -868,9 +876,12 @@ public int run(String[] args) throws Exception {
     return 0;
   }

+  static final String TEST_HADOOP_ARCHIVES_JAR_PATH = "test.hadoop.archives.jar";
+
   /** the main functions **/
   public static void main(String[] args) {
     JobConf job = new JobConf(HadoopArchives.class);
     HadoopArchives harchives = new HadoopArchives(job);
     int ret = 0;
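
Taken together, the two hunks above add a test-only escape hatch: a JVM
system property names a prebuilt JAR, and setConf() forces it onto the job
via JobConf.setJar(). A minimal sketch of driving the hook (assuming the
property name from the diff; the class name and jar path here are
placeholders for illustration):

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.tools.HadoopArchives;

    public class HarJarOverrideSketch {
      public static void main(String[] args) throws Exception {
        // Point the tool at a prebuilt jar, as the test below does.
        System.setProperty("test.hadoop.archives.jar",
            "/tmp/hadoop-archives-tests.jar");        // placeholder path
        JobConf conf = new JobConf(HadoopArchives.class);
        // The constructor calls setConf(), which applies the override,
        // so any job the tool submits ships the jar named above.
        HadoopArchives tool = new HadoopArchives(conf);
      }
    }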

TestHadoopArchives.java

@@ -39,6 +39,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.util.JarFinder;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Level;

@@ -46,6 +47,9 @@
  * test {@link HadoopArchives}
  */
 public class TestHadoopArchives extends TestCase {
+
+  public static final String HADOOP_ARCHIVES_JAR = JarFinder.getJar(HadoopArchives.class);
+
   {
     ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
       ).getLogger().setLevel(Level.OFF);

@@ -136,6 +140,7 @@ public void testPathWithSpaces() throws Exception {
       "*",
       archivePath.toString()
     };
+    System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
     final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
     assertEquals(0, ToolRunner.run(har, args));
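
JarFinder.getJar resolves, at runtime, the JAR a given class was loaded
from, so the test can hand HadoopArchives a real artifact without wiring up
the classpath by hand. A rough sketch of that lookup (an assumption for
illustration: the real Hadoop utility additionally packs a JAR on the fly
when the class comes from a build directory, the usual case under Maven):

    import java.net.URL;

    public final class JarLookupSketch {
      static String jarContaining(Class<?> klass) {
        String resource = klass.getName().replace('.', '/') + ".class";
        URL url = klass.getClassLoader().getResource(resource);
        if (url != null && "jar".equals(url.getProtocol())) {
          String path = url.getPath();  // e.g. "file:/x/y.jar!/a/B.class"
          return path.substring("file:".length(), path.indexOf("!/"));
        }
        return null;                    // class was not loaded from a jar
      }
    }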

hadoop-tools/pom.xml

@@ -29,6 +29,7 @@
   <modules>
     <module>hadoop-streaming</module>
+    <module>hadoop-archives</module>
   </modules>

   <build>
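
Registering hadoop-archives in the tools aggregator pom is what makes the
moved module (and TestHadoopArchives above) build and run with the rest of
the reactor; assuming a stock Maven setup, something like
"mvn test -pl hadoop-archives" from the aggregator directory exercises it.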