MAPREDUCE-2384. The job submitter should make sure to validate jobs before creation of necessary files. (harsh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1343240 13f79535-47bb-0310-9956-ffa450edef68
Harsh J 2012-05-28 13:14:14 +00:00
parent 4709160d75
commit 0037bf5dc5
2 changed files with 41 additions and 0 deletions
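The change reorders job submission so that output-spec validation runs before any staging files are written; a failed validation then leaves nothing behind in the staging area. Below is a minimal sketch of that ordering, assuming a submitJobInternal-style flow — SubmitOrderSketch and its submit() helper are illustrative names, not the actual JobSubmitter code.

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Cluster;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.JobSubmissionFiles;
    import org.apache.hadoop.mapreduce.OutputFormat;
    import org.apache.hadoop.util.ReflectionUtils;

    // Hypothetical helper illustrating the intended order of operations.
    class SubmitOrderSketch {
      static void submit(Job job, Cluster cluster) throws Exception {
        // 1. Validate the job up front: a bad output spec (e.g. an already
        //    existing output directory) throws here, before anything has
        //    touched the staging area.
        OutputFormat<?, ?> output = ReflectionUtils.newInstance(
            job.getOutputFormatClass(), job.getConfiguration());
        output.checkOutputSpecs(job);

        // 2. Only after validation succeeds, resolve the staging area and
        //    write the job files (job.xml, splits, jar) under the per-job
        //    submit directory. (The real flow obtains a fresh JobID from the
        //    cluster at this point; job.getJobID() is used here for brevity.)
        Path stagingDir =
            JobSubmissionFiles.getStagingDir(cluster, job.getConfiguration());
        Path submitJobDir = new Path(stagingDir, job.getJobID().toString());
        Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);
        // ... copy/write files to submitJobDir, then hand off to the cluster ...
      }
    }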


@@ -55,6 +55,9 @@ Trunk (unreleased changes)
    MAPREDUCE-3302. Remove the last dependency call from
    org.apache.hadoop.record package in MR. (harsh)

    MAPREDUCE-2384. The job submitter should make sure to validate
    jobs before creation of necessary files. (harsh)

  BUG FIXES

    MAPREDUCE-4100. [Gridmix] Bug fixed in compression emulation feature for


@@ -30,7 +30,11 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -65,6 +69,40 @@ public static int runTool(Configuration conf, Tool tool,
    }
  }

  // OutputFormat whose spec check always fails, used to simulate a job
  // whose output specs are invalid.
  private static class BadOutputFormat
    extends TextOutputFormat {
    @Override
    public void checkOutputSpecs(JobContext job)
        throws FileAlreadyExistsException, IOException {
      throw new IOException();
    }
  }

  @Test
  public void testJobSubmissionSpecsAndFiles() throws Exception {
    Configuration conf = createJobConf();
    Job job = MapReduceTestUtil.createJob(conf,
        getInputDir(), getOutputDir(), 1, 1);
    job.setOutputFormatClass(BadOutputFormat.class);
    try {
      job.submit();
      fail("Should've thrown an exception while checking output specs.");
    } catch (Exception e) {
      assertTrue(e instanceof IOException);
    }
    // Since validation failed before any files were written, the job conf
    // file must not exist in the staging area.
    JobID jobId = job.getJobID();
    Cluster cluster = new Cluster(conf);
    Path jobStagingArea = JobSubmissionFiles.getStagingDir(
        cluster,
        job.getConfiguration());
    Path submitJobDir = new Path(jobStagingArea, jobId.toString());
    Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);
    assertFalse(
        "Shouldn't have created a job file if job specs failed.",
        FileSystem.get(conf).exists(submitJobFile)
    );
  }

  @Test
  public void testJobClient() throws Exception {
    Configuration conf = createJobConf();