MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar (Devaraj K via tgraves)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1312018 13f79535-47bb-0310-9956-ffa450edef68

commit 94843b848a (parent eb74ff0c3b)
CHANGES.txt
@@ -230,6 +230,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-4076. Stream job fails with ZipException when use yarn jar
     command (Devaraj K via bobby)
 
+    MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar
+    (Devaraj K via tgraves)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
Hello.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package testjar;
+package org.apache.hadoop.util;
 
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -26,15 +26,14 @@
  *
  */
 public class Hello {
-  public static void main(String[] args){
+  public static void main(String[] args) {
     try {
       System.out.println("Creating file" + args[0]);
       FileOutputStream fstream = new FileOutputStream(args[0]);
       fstream.write("Hello Hadoopers".getBytes());
       fstream.close();
-    }
-    catch (IOException e) {
-      //do nothing
+    } catch (IOException e) {
+      // do nothing
     }
   }
 }
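A note on the package move above: relocating Hello from testjar into org.apache.hadoop.util puts its compiled bytecode on the test classpath in the same package as TestRunJar, so the test can read Hello.class back as an ordinary resource. A minimal sketch of that lookup; the ResourceLookupSketch class is illustrative only and not part of this commit:

package org.apache.hadoop.util;

import java.io.InputStream;

// Illustrative only (not part of this commit): a relative resource name is
// resolved against the calling class's package, so Hello.class can be read
// straight off the test classpath once both classes share a package.
class ResourceLookupSketch {
  static InputStream helloBytes() {
    return ResourceLookupSketch.class.getResourceAsStream("Hello.class");
  }
}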
TestRunJar.java
@@ -18,34 +18,63 @@
 package org.apache.hadoop.util;
 
+import java.io.BufferedInputStream;
 import java.io.File;
-
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * A test to rest the RunJar class.
  */
-@Ignore
-public class TestRunJar extends TestCase {
+public class TestRunJar {
 
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
 
-  public void testRunjar() throws Throwable {
+  private static final String TEST_JAR_NAME = "testjar.jar";
 
-    File outFile = new File(TEST_ROOT_DIR, "out");
-    // delete if output file already exists.
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
+  public void testRunjar() throws Throwable {
+    File outFile = new File(TEST_ROOT_DIR, "out");
+    // delete if output file already exists.
     if (outFile.exists()) {
       outFile.delete();
     }
+    File makeTestJar = makeTestJar();
+
     String[] args = new String[3];
-    args[0] = "build/test/mapred/testjar/testjob.jar";
-    args[1] = "testjar.Hello";
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
     args[2] = outFile.toString();
     RunJar.main(args);
-    assertTrue("RunJar failed", outFile.exists());
+    Assert.assertTrue("RunJar failed", outFile.exists());
+  }
 
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    InputStream entryInputStream = this.getClass().getResourceAsStream(
+        CLASS_NAME);
+    ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+    jstream.putNextEntry(entry);
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        entryInputStream, 2048);
+    int count;
+    byte[] data = new byte[2048];
+    while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      jstream.write(data, 0, count);
+    }
+    jstream.closeEntry();
+    jstream.close();
+
+    return jarFile;
   }
 }