MAPREDUCE-5815. Fixed test-failure of TestMRAppMaster by making MRAppMaster gracefully handle empty-queue names. Contributed by Akira Ajisaka.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1586559 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2014-04-11 04:01:39 +00:00
parent f8904ad299
commit 8d41b363b8
4 changed files with 30 additions and 6 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -189,6 +189,9 @@ Release 2.4.1 - UNRELEASED
     MAPREDUCE-5824. Fixed test-failure of TestPipesNonJavaInputFormat in
     Windows. (Xuan Gong via vinodkv)
 
+    MAPREDUCE-5815. Fixed test-failure of TestMRAppMaster by making MRAppMaster
+    gracefully handle empty-queue names. (Akira Ajisaka via vinodkv)
+
 Release 2.4.0 - 2014-04-07
 
   INCOMPATIBLE CHANGES

JobHistoryEventHandler.java

@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobCounter;
@@ -442,8 +443,13 @@ protected void setupEventWriter(JobId jobId, String forcedJobStateOnShutDown)
       }
     }
 
+    String queueName = JobConf.DEFAULT_QUEUE_NAME;
+    if (conf != null) {
+      queueName = conf.get(MRJobConfig.QUEUE_NAME, JobConf.DEFAULT_QUEUE_NAME);
+    }
+
     MetaInfo fi = new MetaInfo(historyFile, logDirConfPath, writer,
-        user, jobName, jobId, forcedJobStateOnShutDown);
+        user, jobName, jobId, forcedJobStateOnShutDown, queueName);
     fi.getJobSummary().setJobId(jobId);
     fileMap.put(jobId, fi);
   }
@@ -816,12 +822,14 @@ protected class MetaInfo {
     private String forcedJobStateOnShutDown;
 
     MetaInfo(Path historyFile, Path conf, EventWriter writer, String user,
-        String jobName, JobId jobId, String forcedJobStateOnShutDown) {
+        String jobName, JobId jobId, String forcedJobStateOnShutDown,
+        String queueName) {
       this.historyFile = historyFile;
       this.confFile = conf;
       this.writer = writer;
       this.jobIndexInfo =
-          new JobIndexInfo(-1, -1, user, jobName, jobId, -1, -1, null);
+          new JobIndexInfo(-1, -1, user, jobName, jobId, -1, -1, null,
+              queueName);
       this.jobSummary = new JobSummary();
       this.flushTimer = new Timer("FlushTimer", true);
       this.forcedJobStateOnShutDown = forcedJobStateOnShutDown;
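
The core of the fix is the guarded lookup added in setupEventWriter above: the queue name now falls back to JobConf.DEFAULT_QUEUE_NAME when the handler has no configuration or the configuration does not set MRJobConfig.QUEUE_NAME. Below is a minimal, self-contained sketch of that lookup; the class and method names (QueueNameFallback, resolve) are illustrative and not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;

// Illustrative sketch only; mirrors the guarded lookup added in setupEventWriter().
public class QueueNameFallback {

  static String resolve(Configuration conf) {
    // Start from the built-in default queue name ("default").
    String queueName = JobConf.DEFAULT_QUEUE_NAME;
    if (conf != null) {
      // If the job configuration names a queue, use it; otherwise keep the default.
      queueName = conf.get(MRJobConfig.QUEUE_NAME, JobConf.DEFAULT_QUEUE_NAME);
    }
    return queueName;
  }

  public static void main(String[] args) {
    System.out.println(resolve(null));                 // default

    Configuration conf = new Configuration(false);
    conf.set(MRJobConfig.QUEUE_NAME, "analytics");
    System.out.println(resolve(conf));                 // analytics
  }
}

The explicit null check reflects the same guard in the patch, which allows the handler to proceed even when no configuration has been attached yet.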

FileNameIndexUtils.java

@@ -94,7 +94,7 @@ public static String getDoneFileName(JobIndexInfo indexInfo) throws IOException
     sb.append(DELIMITER);
 
     //QueueName
-    sb.append(escapeDelimiters(indexInfo.getQueueName()));
+    sb.append(escapeDelimiters(getQueueName(indexInfo)));
     sb.append(DELIMITER);
 
     //JobStartTime
@@ -262,6 +262,10 @@ private static String getJobName(JobIndexInfo indexInfo) {
     return getNonEmptyString(indexInfo.getJobName());
   }
 
+  private static String getQueueName(JobIndexInfo indexInfo) {
+    return getNonEmptyString(indexInfo.getQueueName());
+  }
+
   //TODO Maybe handle default values for longs and integers here?
   private static String getNonEmptyString(String in) {
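
The new getQueueName(JobIndexInfo) helper routes the queue name through the same getNonEmptyString normalization that getJobName already uses, so a null or empty queue no longer leaks an empty field into the done-file name. A rough sketch of that normalization pattern; the class name and the "NA" placeholder are assumptions for illustration, not taken from the patch.

// Hypothetical stand-in for the kind of normalization getNonEmptyString() performs;
// the real placeholder value in Hadoop may differ.
final class NonEmptyFieldSketch {

  private static final String PLACEHOLDER = "NA"; // assumed placeholder value

  // Replace a null or empty field with the placeholder before it is embedded
  // in a delimiter-separated history file name.
  static String nonEmpty(String in) {
    return (in == null || in.isEmpty()) ? PLACEHOLDER : in;
  }

  public static void main(String[] args) {
    System.out.println(nonEmpty(""));        // NA
    System.out.println(nonEmpty(null));      // NA
    System.out.println(nonEmpty("default")); // default
  }
}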

JobIndexInfo.java

@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.mapreduce.v2.jobhistory;
 
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 
 /**
- * Maintains information which may be used by the jobHistroy indexing
+ * Maintains information which may be used by the jobHistory indexing
  * system.
  */
 public class JobIndexInfo {
@@ -41,6 +42,13 @@ public JobIndexInfo() {
 
   public JobIndexInfo(long submitTime, long finishTime, String user,
       String jobName, JobId jobId, int numMaps, int numReduces, String jobStatus) {
+    this(submitTime, finishTime, user, jobName, jobId, numMaps, numReduces,
+        jobStatus, JobConf.DEFAULT_QUEUE_NAME);
+  }
+
+  public JobIndexInfo(long submitTime, long finishTime, String user,
+      String jobName, JobId jobId, int numMaps, int numReduces,
+      String jobStatus, String queueName) {
     this.submitTime = submitTime;
     this.finishTime = finishTime;
     this.user = user;
@@ -50,6 +58,7 @@ public JobIndexInfo(long submitTime, long finishTime, String user,
     this.numReduces = numReduces;
     this.jobStatus = jobStatus;
     this.jobStartTime = -1;
+    this.queueName = queueName;
   }
 
   public long getSubmitTime() {
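
With the added delegating constructor, existing 8-argument callers keep compiling and implicitly get JobConf.DEFAULT_QUEUE_NAME, while new callers can pass a queue explicitly. A small usage sketch under those assumptions; the literal values, and the null JobId used to keep it self-contained, are illustrative only.

import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;

public class JobIndexInfoUsage {
  public static void main(String[] args) {
    // Old-style call: the 8-arg constructor now delegates to the 9-arg one
    // with JobConf.DEFAULT_QUEUE_NAME, so the queue is never left empty.
    JobIndexInfo legacy = new JobIndexInfo(1L, 2L, "alice", "wordcount",
        null /* JobId omitted for brevity */, 10, 2, "SUCCEEDED");

    // New-style call: the queue name is recorded explicitly.
    JobIndexInfo withQueue = new JobIndexInfo(1L, 2L, "alice", "wordcount",
        null /* JobId omitted for brevity */, 10, 2, "SUCCEEDED", "analytics");

    System.out.println(legacy.getQueueName());    // default
    System.out.println(withQueue.getQueueName()); // analytics
  }
}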