MAPREDUCE-7192. JobHistoryServer attempts page supports jumping to the container log page on the NM when log aggregation is disabled. Contributed by Jiandan Yang.

Weiwei Yang committed 2019-03-13 17:00:35 +08:00
parent 17a3e14d25
commit 159a715eef
3 changed files with 48 additions and 11 deletions
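
The core change is in HsTaskPage.AttemptsBlock: it now receives the job Configuration and, when yarn.log-aggregation-enable is false, points each attempt's "logs" link at the NodeManager's containerlogs page (where the logs still live) instead of the JobHistoryServer's aggregated-log page. Below is a minimal sketch of that decision, assuming the usual JHS and NM web paths; the helper name, URL shapes, and sample identifiers are illustrative and not part of the patch.

// Sketch only: mirrors the decision the patch adds to HsTaskPage.AttemptsBlock.
// Helper name, URL shapes and sample values are illustrative, not from the patch.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class LogsLinkSketch {

  // Pick the log link for one task attempt's container.
  static String logsLink(Configuration conf, String nodeHttpAddr,
      String nodeIdString, String containerIdString, String attemptId,
      String user) {
    boolean aggregated = conf.getBoolean(
        YarnConfiguration.LOG_AGGREGATION_ENABLED,
        YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED);
    if (aggregated) {
      // Aggregated logs are served by the JobHistoryServer's logs page.
      return "/jobhistory/logs/" + nodeIdString + "/" + containerIdString
          + "/" + attemptId + "/" + user;
    }
    // No aggregation: the logs are still on the node, so link straight to
    // the NodeManager web UI's containerlogs page.
    return "http://" + nodeHttpAddr + "/node/containerlogs/"
        + containerIdString + "/" + user;
  }

  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
    System.out.println(logsLink(conf, "nm-host:8042", "nm-host:45454",
        "container_0_0005_01_000001", "attempt_0_0001_r_000000_0", "User"));
  }
}

HsAttemptsPage changes only because FewAttemptsBlock must forward the injected Configuration to the new AttemptsBlock constructor.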

HsAttemptsPage.java

@@ -26,6 +26,7 @@
 import java.util.List;
 import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
@@ -46,8 +47,8 @@
 public class HsAttemptsPage extends HsTaskPage {
   static class FewAttemptsBlock extends HsTaskPage.AttemptsBlock {
     @Inject
-    FewAttemptsBlock(App ctx) {
-      super(ctx);
+    FewAttemptsBlock(App ctx, Configuration conf) {
+      super(ctx, conf);
     }
     /*

HsTaskPage.java

@@ -30,6 +30,7 @@
 import java.util.Collection;
 import org.apache.commons.text.StringEscapeUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -38,6 +39,7 @@
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
@@ -61,10 +63,12 @@ public class HsTaskPage extends HsView {
    */
   static class AttemptsBlock extends HtmlBlock {
     final App app;
+    final Configuration conf;
     @Inject
-    AttemptsBlock(App ctx) {
+    AttemptsBlock(App ctx, Configuration conf) {
       app = ctx;
+      this.conf = conf;
     }
     @Override
@@ -152,13 +156,21 @@ protected void render(Block html) {
               StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
           .append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
-          .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
-          .append("<a class='logslink' href='").append(url("logs", nodeIdString
-            , containerIdString, taid, app.getJob().getUserName()))
-          .append("'>logs</a>\",\"")
-          .append(attemptStartTime).append("\",\"");
+          .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"");
+      String logsUrl = url("logs", nodeIdString, containerIdString, taid,
+          app.getJob().getUserName());
+      if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+          YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
+        logsUrl =
+            url(MRWebAppUtil.getYARNWebappScheme(), nodeHttpAddr, "node",
+                "containerlogs", containerIdString,
+                app.getJob().getUserName());
+      }
+      attemptsTableData.append("<a class='logslink' href='").append(logsUrl)
+          .append("'>logs</a>\",\"");
+      attemptsTableData.append(attemptStartTime).append("\",\"");
       if(type == TaskType.REDUCE) {
         attemptsTableData.append(shuffleFinishTime).append("\",\"")
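
With aggregation enabled, the link keeps its previous form and resolves through the JobHistoryServer's logs page; with aggregation disabled it is built from the NodeManager's HTTP address, e.g. (illustrative) http://nm-host:8042/node/containerlogs/container_0_0005_01_000001/User. The test below exercises both settings by toggling yarn.log-aggregation-enable on the Configuration handed to AttemptsBlockForTest and checking which link appears in the rendered table.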

TestBlocks.java

@@ -58,6 +58,7 @@
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.webapp.Controller.RequestContext;
 import org.apache.hadoop.yarn.webapp.View.ViewContext;
 import org.apache.hadoop.yarn.webapp.Controller;
@@ -193,7 +194,9 @@ public void testAttemptsBlock() {
     when(job.getUserName()).thenReturn("User");
     app.setJob(job);
-    AttemptsBlockForTest block = new AttemptsBlockForTest(app);
+    Configuration conf = new Configuration();
+    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
+    AttemptsBlockForTest block = new AttemptsBlockForTest(app, conf);
     block.addParameter(AMParams.TASK_TYPE, "r");
     PrintWriter pWriter = new PrintWriter(data);
@@ -212,6 +215,27 @@ public void testAttemptsBlock() {
     assertTrue(data.toString().contains("100010"));
     assertTrue(data.toString().contains("100011"));
     assertTrue(data.toString().contains("100012"));
+    data.reset();
+    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
+    block = new AttemptsBlockForTest(app, conf);
+    block.addParameter(AMParams.TASK_TYPE, "r");
+    pWriter = new PrintWriter(data);
+    html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
+    block.render(html);
+    pWriter.flush();
+    // should be printed information about attempts
+    assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
+    assertTrue(data.toString().contains("SUCCEEDED"));
+    assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
+    assertTrue(data.toString().contains("Processed 128\\/128 records &lt;p&gt; \\n"));
+    assertTrue(data.toString().contains(
+        "Node address:node:containerlogs:container_0_0005_01_000001:User:"));
+    assertTrue(data.toString().contains("100002"));
+    assertTrue(data.toString().contains("100010"));
+    assertTrue(data.toString().contains("100011"));
+    assertTrue(data.toString().contains("100012"));
   }
   /**
@@ -438,8 +462,8 @@ public void addParameter(String name, String value) {
       return value == null ? defaultValue : value;
     }
-    public AttemptsBlockForTest(App ctx) {
-      super(ctx);
+    public AttemptsBlockForTest(App ctx, Configuration conf) {
+      super(ctx, conf);
     }
     @Override