MAPREDUCE-3197. TestMRClientService failing on building clean checkout of branch 0.23 (mahadev)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1185486 13f79535-47bb-0310-9956-ffa450edef68
parent c1d90772b6
commit 9ff7c1b1de
CHANGES.txt
@@ -1657,6 +1657,9 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-3196. TestLinuxContainerExecutorWithMocks fails on Mac OSX.
     (Arun Murthy via mahadev)
 
+    MAPREDUCE-3197. TestMRClientService failing on building clean checkout of
+    branch 0.23 (mahadev)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
TestMRClientService.java
@@ -38,7 +38,6 @@
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -49,10 +48,8 @@
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.junit.Test;
 
@@ -150,14 +147,12 @@ public void test() throws Exception {
         proxy.getTaskReports(gtreportsRequest).getTaskReportList());
 
     List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
-    Assert.assertEquals("Num diagnostics not correct", 2 , diag.size());
+    Assert.assertEquals("Num diagnostics not correct", 1 , diag.size());
     Assert.assertEquals("Diag 1 not correct",
         diagnostic1, diag.get(0).toString());
-    Assert.assertEquals("Diag 2 not correct",
-        diagnostic2, diag.get(1).toString());
 
     TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
-    Assert.assertEquals("Num diagnostics not correct", 2,
+    Assert.assertEquals("Num diagnostics not correct", 1,
         taskReport.getDiagnosticsCount());
 
     //send the done signal to the task
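
For context, a minimal sketch (not part of the commit) of how the diagnostics checks in TestMRClientService#test read once this patch is applied; proxy, gdRequest, gtrRequest, diagnostic1, and the rest of the test setup come from the existing test and are assumed here:

    // After the patch the test expects exactly one diagnostic string for the attempt,
    // so only diagnostic1 is verified against the list returned over MRClientProtocol.
    List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
    Assert.assertEquals("Num diagnostics not correct", 1, diag.size());
    Assert.assertEquals("Diag 1 not correct", diagnostic1, diag.get(0).toString());

    // The TaskReport fetched through the same proxy should report the same single diagnostic.
    TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
    Assert.assertEquals("Num diagnostics not correct", 1, taskReport.getDiagnosticsCount());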