MAPREDUCE-6649. getFailureInfo not returning any failure info. Contributed by Eric Badger
This commit is contained in:
parent cab9cbaa0a
commit cc8b83a8e8
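Two things change below: constructJobReport() now copies jobInfo.getErrorInfo() into the report's diagnostics, and loadFullHistoryData() builds its parser through a new overridable createJobHistoryParser() hook, which the added test at the bottom of the diff uses to slip in a mocked JobHistoryParser.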
@@ -142,6 +142,7 @@ private void constructJobReport() {
    report.setFinishTime(jobInfo.getFinishTime());
    report.setJobName(jobInfo.getJobname());
    report.setUser(jobInfo.getUsername());
    report.setDiagnostics(jobInfo.getErrorInfo());

    if ( getTotalMaps() == 0 ) {
      report.setMapProgress(1.0f);
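The added setDiagnostics() line is the actual fix: the error text parsed from the job history file was not being copied onto the JobReport, so downstream failure-info lookups came back empty. A minimal sketch of the reader side, not part of this patch — the helper class and its null handling are invented for illustration; only getReport() and getDiagnostics() appear in the diff:

import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.hs.CompletedJob;

public final class DiagnosticsProbe {

  private DiagnosticsProbe() {
  }

  /**
   * Reads back the diagnostics that constructJobReport() now records.
   * Hypothetical helper for illustration only; returns "" when no
   * error info was written to the history file.
   */
  public static String failureInfo(CompletedJob job) {
    JobReport report = job.getReport();
    String diagnostics = report.getDiagnostics();
    return diagnostics == null ? "" : diagnostics;
  }
}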
@@ -335,6 +336,12 @@ private void loadAllTasks() {
    }
  }

  protected JobHistoryParser createJobHistoryParser(Path historyFileAbsolute)
      throws IOException {
    return new JobHistoryParser(historyFileAbsolute.getFileSystem(conf),
        historyFileAbsolute);
  }

  //History data is leisurely loaded when task level data is requested
  protected synchronized void loadFullHistoryData(boolean loadTasks,
      Path historyFileAbsolute) throws IOException {
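The new createJobHistoryParser() factory does not change behavior on its own; by default it builds the same JobHistoryParser over the history file's filesystem. Its value is as a test seam: a subclass can override it to return a canned parser, which is exactly what the new test at the end of this diff does.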
@@ -347,7 +354,7 @@ protected synchronized void loadFullHistoryData(boolean loadTasks,
    JobHistoryParser parser = null;
    try {
      final FileSystem fs = historyFileAbsolute.getFileSystem(conf);
      parser = new JobHistoryParser(fs, historyFileAbsolute);
      parser = createJobHistoryParser(historyFileAbsolute);
      final Path jobConfPath = new Path(historyFileAbsolute.getParent(),
          JobHistoryUtils.getIntermediateConfFileName(jobId));
      final Configuration conf = new Configuration();
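Here the direct new JobHistoryParser(fs, historyFileAbsolute) construction inside loadFullHistoryData() gives way to the createJobHistoryParser(historyFileAbsolute) call, so production code and tests now go through the same hook.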
@@ -19,14 +19,18 @@

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
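The new test leans on several of the imports above: the overridden factory is declared to throw IOException, the spied JobInfo returns Collections.<JobHistoryParser.AMInfo>emptyList(), and the top-level JobHistoryParser class is what gets mocked.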
@@ -236,4 +240,27 @@ public void testGetTaskAttemptCompletionEvent() throws Exception{

  }

  @Test (timeout=30000)
  public void testCompletedJobWithDiagnostics() throws Exception {
    final String jobError = "Job Diagnostics";
    JobInfo jobInfo = spy(new JobInfo());
    when(jobInfo.getErrorInfo()).thenReturn(jobError);
    when(jobInfo.getJobStatus()).thenReturn(JobState.FAILED.toString());
    when(jobInfo.getAMInfos()).thenReturn(Collections.<JobHistoryParser.AMInfo>emptyList());
    final JobHistoryParser mockParser = mock(JobHistoryParser.class);
    when(mockParser.parse()).thenReturn(jobInfo);
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    when(info.getHistoryFile()).thenReturn(fullHistoryPath);
    CompletedJob job =
        new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user",
            info, jobAclsManager) {
          @Override
          protected JobHistoryParser createJobHistoryParser(
              Path historyFileAbsolute) throws IOException {
            return mockParser;
          }
        };
    assertEquals(jobError, job.getReport().getDiagnostics());
  }
}
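The test exercises the whole path within CompletedJob: a spied JobInfo reports "Job Diagnostics" as its error info and FAILED as its status, the overridden createJobHistoryParser() hands back a mocked parser that returns that JobInfo, and the assertion checks that the same string comes back from job.getReport().getDiagnostics().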