MAPREDUCE-4802. Takes a long time to load the task list on the AM for large jobs (Ravi Prakash via bobby)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1410498 13f79535-47bb-0310-9956-ffa450edef68
parent f13da263bf
commit d2b9a604bd
@@ -587,6 +587,9 @@ Release 0.23.6 - UNRELEASED
 
   BUG FIXES
 
+    MAPREDUCE-4802. Takes a long time to load the task list on the AM for
+    large jobs (Ravi Prakash via bobby)
+
 Release 0.23.5 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -21,15 +21,13 @@
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.util.StringHelper.percent;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
 
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -66,6 +64,8 @@ public class TasksBlock extends HtmlBlock {
             th("Finish Time").
             th("Elapsed Time")._()._().
         tbody();
+    StringBuilder tasksTableData = new StringBuilder("[\n");
+
     for (Task task : app.getJob().getTasks().values()) {
       if (type != null && task.getType() != type) {
         continue;
@@ -73,31 +73,28 @@ public class TasksBlock extends HtmlBlock {
       TaskInfo info = new TaskInfo(task);
       String tid = info.getId();
       String pct = percent(info.getProgress() / 100);
-      long startTime = info.getStartTime();
-      long finishTime = info.getFinishTime();
-      long elapsed = info.getElapsedTime();
-      tbody.
-        tr().
-          td().
-            br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
-            a(url("task", tid), tid)._().
-          td().
-            br().$title(pct)._().
-            div(_PROGRESSBAR).
-              $title(join(pct, '%')). // tooltip
-              div(_PROGRESSBAR_VALUE).
-                $style(join("width:", pct, '%'))._()._()._().
-          td(info.getState()).
-          td().
-            br().$title(String.valueOf(startTime))._().
-            _(Times.format(startTime))._().
-          td().
-            br().$title(String.valueOf(finishTime))._().
-            _(Times.format(finishTime))._().
-          td().
-            br().$title(String.valueOf(elapsed))._().
-            _(StringUtils.formatTime(elapsed))._()._();
+      tasksTableData.append("[\"<a href='").append(url("task", tid))
+      .append("'>").append(tid).append("</a>\",\"")
+      //Progress bar
+      .append("<br title='").append(pct)
+      .append("'> <div class='").append(C_PROGRESSBAR).append("' title='")
+      .append(join(pct, '%')).append("'> ").append("<div class='")
+      .append(C_PROGRESSBAR_VALUE).append("' style='")
+      .append(join("width:", pct, '%')).append("'> </div> </div>\",\"")
+
+      .append(info.getState()).append("\",\"")
+      .append(info.getStartTime()).append("\",\"")
+      .append(info.getFinishTime()).append("\",\"")
+      .append(info.getElapsedTime()).append("\"],\n");
     }
+    //Remove the last comma and close off the array of arrays
+    if(tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
+      tasksTableData.delete(tasksTableData.length()-2, tasksTableData.length()-1);
+    }
+    tasksTableData.append("]");
+    html.script().$type("text/javascript").
+    _("var tasksTableData=" + tasksTableData)._();
+
     tbody._()._();
   }
 }
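Instead of emitting one Hamlet table row per task, the block now accumulates every row into a single JavaScript array literal and writes it to the page in one script tag. For illustration only, with invented task IDs, times, and abbreviated progress-bar class names (the real class names come from the C_PROGRESSBAR constants), the emitted script would look roughly like this:

// Sketch of the script block the new TasksBlock code writes into the page.
// All values are invented; column order matches the appends above:
// task link, progress-bar markup, state, start time, finish time, elapsed time.
var tasksTableData = [
["<a href='task/task_1353001600832_0001_m_000000'>task_1353001600832_0001_m_000000</a>",
 "<br title='58.5'> <div class='ui-progressbar' title='58.5%'> <div class='ui-progressbar-value' style='width:58.5%'> </div> </div>",
 "RUNNING","1353001700000","0","35000"],
["<a href='task/task_1353001600832_0001_m_000001'>task_1353001600832_0001_m_000001</a>",
 "<br title='100.0'> <div class='ui-progressbar' title='100.0%'> <div class='ui-progressbar-value' style='width:100.0%'> </div> </div>",
 "SUCCEEDED","1353001700000","1353001735000","35000"]
]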
@@ -37,11 +37,26 @@ public class TasksPage extends AppView {
   }
 
   private String tasksTableInit() {
-    return tableInit().
+    return tableInit()
+    .append(", 'aaData': tasksTableData")
+    .append(", bDeferRender: true")
+    .append(", bProcessing: true")
+
+    .append("\n, aoColumnDefs: [\n")
+    .append("{'sType':'numeric', 'aTargets': [0]")
+    .append(", 'mRender': parseHadoopID }")
+
+    .append("\n, {'sType':'numeric', bSearchable:false, 'aTargets': [1]")
+    .append(", 'mRender': parseHadoopProgress }")
+
+
+    .append("\n, {'sType':'numeric', 'aTargets': [3, 4]")
+    .append(", 'mRender': renderHadoopDate }")
+
+    .append("\n, {'sType':'numeric', 'aTargets': [5]")
+    .append(", 'mRender': renderHadoopElapsedTime }]")
+
     // Sort by id upon page load
-      append(", aaSorting: [[0, 'asc']]").
-      append(",aoColumns:[{sType:'title-numeric'},{sType:'title-numeric',").
-      append("bSearchable:false},null,{sType:'title-numeric'},").
-      append("{sType:'title-numeric'},{sType:'title-numeric'}]}").toString();
+    .append(", aaSorting: [[0, 'asc']] }").toString();
   }
 }
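With 'aaData' pointing at the embedded tasksTableData array, DataTables builds its rows from data rather than from pre-rendered HTML, and bDeferRender keeps it from creating DOM nodes for rows that are never shown. Spelled out as a plain options object, the initialization amounts to roughly the following; this is a sketch only, and the '#tasks' selector and the options contributed by tableInit() are assumptions, not part of the patch:

// Approximate shape of the DataTables setup produced by tasksTableInit();
// options inherited from tableInit() are omitted.
$('#tasks').dataTable({
  // ...common options from tableInit()...
  'aaData': tasksTableData,   // rows come from the embedded array, not from <td> elements
  bDeferRender: true,         // only render rows that are actually displayed
  bProcessing: true,
  aoColumnDefs: [
    {'sType':'numeric', 'aTargets': [0], 'mRender': parseHadoopID },
    {'sType':'numeric', bSearchable:false, 'aTargets': [1], 'mRender': parseHadoopProgress },
    {'sType':'numeric', 'aTargets': [3, 4], 'mRender': renderHadoopDate },
    {'sType':'numeric', 'aTargets': [5], 'mRender': renderHadoopElapsedTime }],
  aaSorting: [[0, 'asc']]
});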
@@ -78,11 +78,11 @@ public class HsJobsBlock extends HtmlBlock {
       .append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
       .append("<a href='").append(url("job", job.getId())).append("'>")
       .append(job.getId()).append("</a>\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(job.getName()))
+      .append(StringEscapeUtils.escapeHtml(job.getName()))
       .append("\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(job.getUserName()))
+      .append(StringEscapeUtils.escapeHtml(job.getUserName()))
       .append("\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(job.getQueueName()))
+      .append(StringEscapeUtils.escapeHtml(job.getQueueName()))
       .append("\",\"")
       .append(job.getState()).append("\",\"")
       .append(String.valueOf(job.getMapsTotal())).append("\",\"")
@@ -29,7 +29,6 @@
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -58,10 +58,14 @@ jQuery.fn.dataTableExt.oApi.fnSetFilteringDelay = function ( oSettings, iDelay )
 
 function renderHadoopDate(data, type, full) {
   if (type === 'display') {
+    if(data === '0') {
+      return "N/A";
+    }
     return new Date(parseInt(data)).toUTCString();
   }
   // 'filter', 'sort', 'type' and undefined all just use the number
-  return data;
+  // If date is 0, then for purposes of sorting it should be consider max_int
+  return data === '0' ? '9007199254740992' : data;
 }
 
 function renderHadoopElapsedTime(data, type, full) {
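For reference, the calls below show how the amended renderHadoopDate behaves for the rendering modes DataTables passes to an mRender callback; the calls themselves are illustrative and are not part of the patch:

// Illustrative invocations; DataTables normally calls this itself via mRender.
renderHadoopDate('1353001760000', 'display'); // a human-readable UTC date string
renderHadoopDate('0', 'display');             // "N/A" for tasks that have not finished
renderHadoopDate('0', 'sort');                // '9007199254740992' so unfinished tasks sort last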