diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index ee842e0443..8180d24d69 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -500,6 +500,9 @@ Release 0.23.3 - UNRELEASED
 
     MAPREDUCE-4238. mavenize data_join. (tgraves)
 
+    MAPREDUCE-4102. job counters not available in Jobhistory webui for
+    killed jobs (Bhallamudi Venkata Siva Kamesh via tgraves)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java
index ec02ef5e89..248713cecb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java
@@ -69,7 +69,7 @@ public class CountersBlock extends HtmlBlock {
       return;
     }
 
-    if(total == null || total.getGroupNames() == null) {
+    if(total == null || total.getGroupNames() == null || total.countCounters() == 0) {
       String type = $(TASK_ID);
       if(type == null || type.isEmpty()) {
         type = $(JOB_ID, "the job");
@@ -180,14 +180,25 @@ private void getCounters(AppContext ctx) {
     // Get all types of counters
     Map<TaskId, Task> tasks = job.getTasks();
     total = job.getAllCounters();
+    boolean needTotalCounters = false;
+    if (total == null) {
+      total = new Counters();
+      needTotalCounters = true;
+    }
     map = new Counters();
     reduce = new Counters();
     for (Task t : tasks.values()) {
       Counters counters = t.getCounters();
+      if (counters == null) {
+        continue;
+      }
       switch (t.getType()) {
         case MAP:     map.incrAllCounters(counters);     break;
         case REDUCE:  reduce.incrAllCounters(counters);  break;
       }
+      if (needTotalCounters) {
+        total.incrAllCounters(counters);
+      }
     }
   }
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
index 6dbc9189b7..8d5c46992c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
@@ -81,6 +81,9 @@ private void getCounters(AppContext ctx, Job job) {
     Map<TaskId, Task> tasks = job.getTasks();
     for (Task t : tasks.values()) {
       Counters counters = t.getCounters();
+      if (counters == null) {
+        continue;
+      }
       total.incrAllCounters(counters);
       switch (t.getType()) {
       case MAP:
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
index dd57408014..81af358bc2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.mapreduce.v2.app;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -131,6 +132,17 @@ public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
     }
     return map;
   }
+
+  public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
+      int numTasksPerJob, int numAttemptsPerTask, boolean hasFailedTasks) {
+    Map<JobId, Job> map = Maps.newHashMap();
+    for (int j = 0; j < numJobsPerApp; ++j) {
+      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask, null,
+          hasFailedTasks);
+      map.put(job.getID(), job);
+    }
+    return map;
+  }
 
   public static JobId newJobID(ApplicationId appID, int i) {
     JobId id = Records.newRecord(JobId.class);
@@ -316,16 +328,16 @@ public String getNodeRackName() {
     };
   }
 
-  public static Map<TaskId, Task> newTasks(JobId jid, int n, int m) {
+  public static Map<TaskId, Task> newTasks(JobId jid, int n, int m, boolean hasFailedTasks) {
     Map<TaskId, Task> map = Maps.newHashMap();
     for (int i = 0; i < n; ++i) {
-      Task task = newTask(jid, i, m);
+      Task task = newTask(jid, i, m, hasFailedTasks);
       map.put(task.getID(), task);
     }
     return map;
   }
 
-  public static Task newTask(JobId jid, int i, int m) {
+  public static Task newTask(JobId jid, int i, int m, final boolean hasFailedTasks) {
     final TaskId tid = Records.newRecord(TaskId.class);
     tid.setJobId(jid);
     tid.setId(i);
@@ -345,6 +357,9 @@ public TaskReport getReport() {
 
       @Override
       public Counters getCounters() {
+        if (hasFailedTasks) {
+          return null;
+        }
         return new Counters(
           TypeConverter.fromYarn(report.getCounters()));
       }
@@ -394,8 +409,14 @@ public TaskState getState() {
 
   public static Counters getCounters(
       Collection<Task> tasks) {
+    List<Task> completedTasks = new ArrayList<Task>();
+    for (Task task : tasks) {
+      if (task.getCounters() != null) {
+        completedTasks.add(task);
+      }
+    }
     Counters counters = new Counters();
-    return JobImpl.incrTaskCounters(counters, tasks);
+    return JobImpl.incrTaskCounters(counters, completedTasks);
   }
 
   static class TaskCount {
@@ -434,10 +455,15 @@ public static Job newJob(ApplicationId appID, int i, int n, int m) {
   }
 
   public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile) {
+    return newJob(appID, i, n, m, confFile, false);
+  }
+
+  public static Job newJob(ApplicationId appID, int i, int n, int m,
+      Path confFile, boolean hasFailedTasks) {
     final JobId id = newJobID(appID, i);
     final String name = newJobName();
     final JobReport report = newJobReport(id);
-    final Map<TaskId, Task> tasks = newTasks(id, n, m);
+    final Map<TaskId, Task> tasks = newTasks(id, n, m, hasFailedTasks);
     final TaskCount taskCount = getTaskCount(tasks.values());
     final Counters counters = getCounters(tasks
         .values());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java
index b8b64a6d91..74ca32c98b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java
@@ -43,6 +43,14 @@ public static JobsPair newHistoryJobs(ApplicationId appID, int numJobsPerApp,
         numAttemptsPerTask);
     return split(mocked);
   }
+
+  public static JobsPair newHistoryJobs(ApplicationId appID, int numJobsPerApp,
+      int numTasksPerJob, int numAttemptsPerTask, boolean hasFailedTasks)
+      throws IOException {
+    Map<JobId, Job> mocked = newJobs(appID, numJobsPerApp, numTasksPerJob,
+        numAttemptsPerTask, hasFailedTasks);
+    return split(mocked);
+  }
 
   private static JobsPair split(Map<JobId, Job> mocked) throws IOException {
     JobsPair ret = new JobsPair();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
index 4ef4d3ea07..0fb1f7544f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
@@ -63,10 +63,16 @@ static class TestAppContext implements AppContext {
     final Map<JobId, Job> jobs;
     final long startTime = System.currentTimeMillis();
 
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts,
+        boolean hasFailedTasks) {
       appID = MockJobs.newAppID(appid);
       appAttemptID = MockJobs.newAppAttemptID(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts,
+          hasFailedTasks);
+    }
+
+    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+      this(appid, numJobs, numTasks, numAttempts, false);
     }
 
     TestAppContext() {
@@ -198,6 +204,14 @@ public void testTaskView() {
         appContext, params);
   }
 
+  @Test public void testJobCounterViewForKilledJob() {
+    LOG.info("JobCounterViewForKilledJob");
+    AppContext appContext = new TestAppContext(0, 1, 1, 1, true);
+    Map<String, String> params = TestAMWebApp.getJobParams(appContext);
+    WebAppTests.testPage(HsCountersPage.class, AppContext.class,
+        appContext, params);
+  }
+
   @Test public void testSingleCounterView() {
     LOG.info("HsSingleCounterPage");
     WebAppTests.testPage(HsSingleCounterPage.class, AppContext.class,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
index 0452406231..2ce6c5d5ea 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
@@ -101,13 +101,15 @@ static class TestAppContext implements HistoryContext {
     final Map<JobId, Job> partialJobs;
     final Map<JobId, Job> fullJobs;
     final long startTime = System.currentTimeMillis();
-    
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+
+    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts,
+        boolean hasFailedTasks) {
       appID = MockJobs.newAppID(appid);
       appAttemptID = MockJobs.newAppAttemptID(appID, 0);
       JobsPair jobs;
       try {
-        jobs = MockHistoryJobs.newHistoryJobs(appID, numJobs, numTasks, numAttempts);
+        jobs = MockHistoryJobs.newHistoryJobs(appID, numJobs, numTasks,
+            numAttempts, hasFailedTasks);
       } catch (IOException e) {
         throw new YarnException(e);
       }
@@ -115,6 +117,10 @@ static class TestAppContext implements HistoryContext {
       fullJobs = jobs.full;
     }
 
+    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+      this(appid, numJobs, numTasks, numAttempts, false);
+    }
+
     TestAppContext() {
       this(0, 1, 2, 1);
     }
@@ -628,6 +634,46 @@ public void testJobCountersSlash() throws JSONException, Exception {
       verifyHsJobCounters(info, jobsMap.get(id));
     }
   }
+
+  @Test
+  public void testJobCountersForKilledJob() throws Exception {
+    WebResource r = resource();
+    appContext = new TestAppContext(0, 1, 1, 1, true);
+    injector = Guice.createInjector(new ServletModule() {
+      @Override
+      protected void configureServlets() {
+
+        webApp = mock(HsWebApp.class);
+        when(webApp.name()).thenReturn("hsmockwebapp");
+
+        bind(JAXBContextResolver.class);
+        bind(HsWebServices.class);
+        bind(GenericExceptionHandler.class);
+        bind(WebApp.class).toInstance(webApp);
+        bind(AppContext.class).toInstance(appContext);
+        bind(HistoryContext.class).toInstance(appContext);
+        bind(Configuration.class).toInstance(conf);
+
+        serve("/*").with(GuiceContainer.class);
+      }
+    });
+
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+
+      ClientResponse response = r.path("ws").path("v1").path("history")
+          .path("mapreduce").path("jobs").path(jobId).path("counters/")
+          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      JSONObject json = response.getEntity(JSONObject.class);
+      assertEquals("incorrect number of elements", 1, json.length());
+      JSONObject info = json.getJSONObject("jobCounters");
+      WebServicesTestUtils.checkStringMatch("id", MRApps.toString(id),
+          info.getString("id"));
+      assertTrue("Job shouldn't contain any counters", info.length() == 1);
+    }
+  }
 
   @Test
   public void testJobCountersDefault() throws JSONException, Exception {