YARN-4297. TestJobHistoryEventHandler and TestRMContainerAllocator failing on YARN-2928 branch (Varun Saxena via sjlee)
commit ae72f1dc77 (parent 51254a6b51)
@@ -54,6 +54,7 @@
 import org.apache.hadoop.mapreduce.util.JobHistoryEventUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;

@@ -65,6 +66,7 @@
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;

@@ -486,7 +488,7 @@ public void testAMStartedEvent() throws Exception {
   // stored to the Timeline store
   @Test (timeout=50000)
   public void testTimelineEventHandling() throws Exception {
-    TestParams t = new TestParams(false);
+    TestParams t = new TestParams(RunningAppContext.class, false);
     Configuration conf = new YarnConfiguration();
     conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
     MiniYARNCluster yarnCluster = null;

@@ -741,21 +743,30 @@ private String setupTestWorkDir() {
     }
   }
 
-  private AppContext mockAppContext(ApplicationId appId, boolean isLastAMRetry) {
-    JobId jobId = TypeConverter.toYarn(TypeConverter.fromYarn(appId));
-    AppContext mockContext = mock(AppContext.class);
+  private Job mockJob() {
     Job mockJob = mock(Job.class);
     when(mockJob.getAllCounters()).thenReturn(new Counters());
     when(mockJob.getTotalMaps()).thenReturn(10);
     when(mockJob.getTotalReduces()).thenReturn(10);
     when(mockJob.getName()).thenReturn("mockjob");
+    return mockJob;
+  }
+
+  private AppContext mockAppContext(Class<? extends AppContext> contextClass,
+      ApplicationId appId, boolean isLastAMRetry) {
+    JobId jobId = TypeConverter.toYarn(TypeConverter.fromYarn(appId));
+    AppContext mockContext = mock(contextClass);
+    Job mockJob = mockJob();
     when(mockContext.getJob(jobId)).thenReturn(mockJob);
     when(mockContext.getApplicationID()).thenReturn(appId);
     when(mockContext.isLastAMRetry()).thenReturn(isLastAMRetry);
+    if (mockContext instanceof RunningAppContext) {
+      when(((RunningAppContext)mockContext).getTimelineClient()).
+          thenReturn(TimelineClient.createTimelineClient());
+    }
     return mockContext;
   }
 
-
   private class TestParams {
     boolean isLastAMRetry;
     String workDir = setupTestWorkDir();

@@ -770,11 +781,14 @@ private class TestParams {
     AppContext mockAppContext;
 
     public TestParams() {
-      this(false);
+      this(AppContext.class, false);
     }
     public TestParams(boolean isLastAMRetry) {
+      this(AppContext.class, isLastAMRetry);
+    }
+    public TestParams(Class<? extends AppContext> contextClass, boolean isLastAMRetry) {
       this.isLastAMRetry = isLastAMRetry;
-      mockAppContext = mockAppContext(appId, this.isLastAMRetry);
+      mockAppContext = mockAppContext(contextClass, appId, this.isLastAMRetry);
     }
   }
 
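The hunks above (the TestJobHistoryEventHandler changes named in the commit subject) come down to one Mockito pattern: let the caller choose which context class to mock, and stub the timeline-client accessor only when the mocked type actually exposes it. Below is a minimal, self-contained sketch of that pattern, assuming Mockito on the classpath as these tests already do; Context, RunningContext, and Client are made-up stand-ins, not the Hadoop classes touched by this patch.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class MockByClassSketch {

  // Hypothetical stand-in for an AppContext-like interface.
  interface Context { String appId(); }

  // Hypothetical subtype that exposes an extra collaborator,
  // analogous to RunningAppContext#getTimelineClient().
  static class RunningContext implements Context {
    public String appId() { return "real"; }
    public Client client() { return new Client(); }
  }

  static class Client { }

  static Context mockContext(Class<? extends Context> contextClass) {
    Context ctx = mock(contextClass);        // mock whatever type the test asked for
    when(ctx.appId()).thenReturn("app_1");   // stub behaviour shared by all contexts
    if (ctx instanceof RunningContext) {     // stub subtype-only behaviour when present
      when(((RunningContext) ctx).client()).thenReturn(new Client());
    }
    return ctx;
  }

  public static void main(String[] args) {
    Context plain = mockContext(Context.class);
    Context running = mockContext(RunningContext.class);
    System.out.println(plain.appId() + " " + running.appId());
  }
}

Mockito's mock(Class) generates a subclass of a concrete class, so the instanceof check passes for RunningContext mocks and is skipped for plain interface mocks; that is what lets the patched mockAppContext serve both the old interface-based tests and the timeline-service test.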
@@ -59,6 +59,7 @@
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.ClusterInfo;
 import org.apache.hadoop.mapreduce.v2.app.MRApp;
+import org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;

@@ -1894,7 +1895,7 @@ private static class MyContainerAllocator extends RMContainerAllocator {
     private AllocateResponse allocateResponse;
     private static AppContext createAppContext(
         ApplicationAttemptId appAttemptId, Job job) {
-      AppContext context = mock(AppContext.class);
+      AppContext context = mock(RunningAppContext.class);
      ApplicationId appId = appAttemptId.getApplicationId();
      when(context.getApplicationID()).thenReturn(appId);
      when(context.getApplicationAttemptId()).thenReturn(appAttemptId);
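The mock(RunningAppContext.class) switch in the TestRMContainerAllocator hunk suggests that something on the allocation path narrows the context to the running subtype before reaching for timeline support; an interface-only mock cannot be narrowed that way. A hedged, self-contained illustration of that failure mode, again with stand-in names rather than the Hadoop classes:

import static org.mockito.Mockito.mock;

public class ConcreteMockSketch {

  interface Context { }

  // Hypothetical running subtype with the extra accessor.
  static class RunningContext implements Context {
    public Object timelineClient() { return new Object(); }
  }

  // Production-style code that assumes the running subtype, which is
  // presumably what the patched test has to satisfy.
  static Object fetchTimelineHandle(Context ctx) {
    return ((RunningContext) ctx).timelineClient();
  }

  public static void main(String[] args) {
    Context good = mock(RunningContext.class);  // narrows fine; accessor returns the mock default
    fetchTimelineHandle(good);

    Context bad = mock(Context.class);          // interface-only mock
    try {
      fetchTimelineHandle(bad);                 // ClassCastException here
    } catch (ClassCastException expected) {
      System.out.println("interface-only mock cannot be narrowed: " + expected);
    }
  }
}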
@@ -104,8 +104,6 @@ private void startWebApp() {
     String bindAddress = conf.get(YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
         YarnConfiguration.DEFAULT_TIMELINE_SERVICE_BIND_HOST) + ":0";
     try {
-      Configuration confForInfoServer = new Configuration(conf);
-      confForInfoServer.setInt(HttpServer2.HTTP_MAX_THREADS, 10);
      HttpServer2.Builder builder = new HttpServer2.Builder()
          .setName("timeline")
          .setConf(conf)