MAPREDUCE-6273. HistoryFileManager should check whether summaryFile exists to avoid FileNotFoundException causing HistoryFileInfo into MOVE_FAILED state. Contributed by zhihai xu.
Committed by Devaraj K on 2015-05-15 12:31:32 +05:30
parent 3bef7c80a9
commit 1ea90144d5
3 changed files with 46 additions and 4 deletions
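
The fix itself is small: moveToDone() in HistoryFileManager.HistoryFileInfo now treats a summary file that no longer exists in the intermediate done directory the same as a null one, logging "No summary file for job" instead of letting the later read throw FileNotFoundException and push the HistoryFileInfo into the MOVE_FAILED state. A minimal, self-contained sketch of that guard pattern follows; it runs against a local FileContext rather than the patched class, and the class name and path used here are made up for illustration.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class SummaryFileGuardSketch {
  public static void main(String[] args) throws IOException {
    // A local FileContext stands in for intermediateDoneDirFc in the real code.
    FileContext fc = FileContext.getLocalFSFileContext(new Configuration());
    // Hypothetical summary path; pass a real path as the first argument to try it.
    Path summaryFile = args.length > 0 ? new Path(args[0]) : null;
    if (summaryFile == null || !fc.util().exists(summaryFile)) {
      // Before the patch only the null check existed, so a summary file that had
      // already been deleted reached the read and threw FileNotFoundException.
      System.out.println("No summary file; continue moving history files to done");
    } else {
      System.out.println("Summary file exists; safe to read and then delete it");
    }
  }
}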

hadoop-mapreduce-project/CHANGES.txt

@@ -425,6 +425,10 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-5708. Duplicate String.format in YarnOutputFiles.getSpillFileForWrite.
     (Konstantin Weitz via devaraj)
 
+    MAPREDUCE-6273. HistoryFileManager should check whether summaryFile exists to
+    avoid FileNotFoundException causing HistoryFileInfo into MOVE_FAILED state.
+    (zhihai xu via devaraj)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java

@@ -303,8 +303,9 @@ public class HistoryFileInfo {
     private JobIndexInfo jobIndexInfo;
     private HistoryInfoState state;
 
-    private HistoryFileInfo(Path historyFile, Path confFile, Path summaryFile,
-        JobIndexInfo jobIndexInfo, boolean isInDone) {
+    @VisibleForTesting
+    protected HistoryFileInfo(Path historyFile, Path confFile,
+        Path summaryFile, JobIndexInfo jobIndexInfo, boolean isInDone) {
       this.historyFile = historyFile;
       this.confFile = confFile;
       this.summaryFile = summaryFile;
@@ -337,7 +338,8 @@ public String toString() {
             + " historyFile = " + historyFile;
     }
 
-    private synchronized void moveToDone() throws IOException {
+    @VisibleForTesting
+    synchronized void moveToDone() throws IOException {
       if (LOG.isDebugEnabled()) {
         LOG.debug("moveToDone: " + historyFile);
       }
@@ -368,7 +370,8 @@ private synchronized void moveToDone() throws IOException {
         paths.add(confFile);
       }
 
-      if (summaryFile == null) {
+      if (summaryFile == null || !intermediateDoneDirFc.util().exists(
+          summaryFile)) {
         LOG.info("No summary file for job: " + jobId);
       } else {
         String jobSummaryString = getJobSummary(intermediateDoneDirFc,

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestHistoryFileManager.java

@@ -35,7 +35,12 @@
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TypeConverter;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
 import org.apache.hadoop.test.CoreTestDriver;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
@@ -221,4 +226,34 @@ public void testScanDirectory() throws Exception {
   }
 
+  @Test
+  public void testHistoryFileInfoSummaryFileNotExist() throws Exception {
+    HistoryFileManagerTest hmTest = new HistoryFileManagerTest();
+    String job = "job_1410889000000_123456";
+    Path summaryFile = new Path(job + ".summary");
+    JobIndexInfo jobIndexInfo = new JobIndexInfo();
+    jobIndexInfo.setJobId(TypeConverter.toYarn(JobID.forName(job)));
+    Configuration conf = dfsCluster.getConfiguration(0);
+    conf.set(JHAdminConfig.MR_HISTORY_DONE_DIR,
+        "/" + UUID.randomUUID());
+    conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR,
+        "/" + UUID.randomUUID());
+    hmTest.serviceInit(conf);
+    HistoryFileInfo info = hmTest.getHistoryFileInfo(null, null,
+        summaryFile, jobIndexInfo, false);
+    info.moveToDone();
+    Assert.assertFalse(info.didMoveFail());
+  }
+
+  static class HistoryFileManagerTest extends HistoryFileManager {
+    public HistoryFileManagerTest() {
+      super();
+    }
+    public HistoryFileInfo getHistoryFileInfo(Path historyFile,
+        Path confFile, Path summaryFile, JobIndexInfo jobIndexInfo,
+        boolean isInDone) {
+      return new HistoryFileInfo(historyFile, confFile, summaryFile,
+          jobIndexInfo, isInDone);
+    }
+  }
 }
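
With the constructor made protected and moveToDone() made package-private under @VisibleForTesting, the new test drives the failure path directly: it builds a HistoryFileInfo whose summary file was never created on the MiniDFSCluster, calls moveToDone(), and asserts via didMoveFail() that the move no longer ends in the MOVE_FAILED state.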