YARN-716. Making ApplicationID immutable. Contributed by Siddharth Seth.
MAPREDUCE-5282. Updating MR App to use immutable ApplicationID after YARN-716. Contributed by Siddharth Seth.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1487994 13f79535-47bb-0310-9956-ffa450edef68
parent 9f551fa542
commit 982753dc8e
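In short, the patch replaces piecemeal mutation of ApplicationId records with a single static factory and takes the setters out of the public API. A minimal migration sketch, assuming a recordFactory field as used throughout the touched tests (illustrative only, not code lifted from the patch):

    // Before YARN-716: create an empty record, then mutate it in place.
    //   ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
    //   appId.setClusterTimestamp(System.currentTimeMillis());
    //   appId.setId(1);
    // After YARN-716: the id is fully specified at construction time.
    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);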
@@ -443,6 +443,9 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5261. Fix issues in TestRMContainerAllocator after YARN-617.
     (Omkar Vinit Joshi via vinodkv)
 
+    MAPREDUCE-5282. Updating MR App to use immutable ApplicationID after
+    YARN-716. (Siddharth Seth via vinodkv)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS
 
     MAPREDUCE-4739. Some MapReduce tests fail to find winutils.
@@ -128,9 +128,7 @@ public class MRApp extends MRAppMaster {
   static ApplicationId applicationId;
 
   static {
-    applicationId = recordFactory.newRecordInstance(ApplicationId.class);
-    applicationId.setClusterTimestamp(0);
-    applicationId.setId(0);
+    applicationId = ApplicationId.newInstance(0, 0);
   }
 
   public MRApp(int maps, int reduces, boolean autoComplete, String testName,
@@ -789,9 +789,7 @@ class MyAppContext implements AppContext {
     private final Map<JobId, Job> allJobs;
 
     MyAppContext(int numberMaps, int numberReduces) {
-      myApplicationID = recordFactory.newRecordInstance(ApplicationId.class);
-      myApplicationID.setClusterTimestamp(clock.getTime());
-      myApplicationID.setId(1);
+      myApplicationID = ApplicationId.newInstance(clock.getTime(), 1);
 
       myAppAttemptID = recordFactory
           .newRecordInstance(ApplicationAttemptId.class);
@@ -84,9 +84,8 @@ public void testDeletionofStaging() throws IOException {
     ApplicationAttemptId attemptId = recordFactory.newRecordInstance(
         ApplicationAttemptId.class);
     attemptId.setAttemptId(0);
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(),
+        0);
     attemptId.setApplicationId(appId);
     JobId jobid = recordFactory.newRecordInstance(JobId.class);
     jobid.setAppId(appId);
@@ -113,9 +112,8 @@ public void testNoDeletionofStagingOnReboot() throws IOException {
     ApplicationAttemptId attemptId = recordFactory.newRecordInstance(
         ApplicationAttemptId.class);
     attemptId.setAttemptId(0);
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(),
+        0);
     attemptId.setApplicationId(appId);
     ContainerAllocator mockAlloc = mock(ContainerAllocator.class);
     Assert.assertTrue(MRJobConfig.DEFAULT_MR_AM_MAX_ATTEMPTS > 1);
@@ -141,9 +139,8 @@ public void testDeletionofStagingOnReboot() throws IOException {
     ApplicationAttemptId attemptId = recordFactory.newRecordInstance(
         ApplicationAttemptId.class);
     attemptId.setAttemptId(1);
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(),
+        0);
     attemptId.setApplicationId(appId);
     ContainerAllocator mockAlloc = mock(ContainerAllocator.class);
     MRAppMaster appMaster = new TestMRApp(attemptId, mockAlloc,
@@ -169,9 +166,8 @@ public void testDeletionofStagingOnKill() throws IOException {
     ApplicationAttemptId attemptId = recordFactory.newRecordInstance(
         ApplicationAttemptId.class);
     attemptId.setAttemptId(0);
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(),
+        0);
     attemptId.setApplicationId(appId);
     JobId jobid = recordFactory.newRecordInstance(JobId.class);
     jobid.setAppId(appId);
@@ -197,9 +193,8 @@ public void testDeletionofStagingOnKillLastTry() throws IOException {
     ApplicationAttemptId attemptId = recordFactory.newRecordInstance(
         ApplicationAttemptId.class);
     attemptId.setAttemptId(1);
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(),
+        0);
     attemptId.setApplicationId(appId);
     JobId jobid = recordFactory.newRecordInstance(JobId.class);
     jobid.setAppId(appId);
@@ -224,9 +224,7 @@ public void setup() {
     metrics = mock(MRAppMetrics.class);
     dataLocations = new String[1];
 
-    appId = Records.newRecord(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(1);
+    appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
 
     jobId = Records.newRecord(JobId.class);
     jobId.setId(1);
@@ -27,7 +27,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.webapp.Controller.RequestContext;
 import org.junit.Before;
 import org.junit.Test;
@@ -41,7 +40,7 @@ public class TestAppController {
   public void setUp() {
     AppContext context = mock(AppContext.class);
     when(context.getApplicationID()).thenReturn(
-        Records.newRecord(ApplicationId.class));
+        ApplicationId.newInstance(0, 0));
     App app = new App(context);
     Configuration conf = new Configuration();
     ctx = mock(RequestContext.class);
@@ -76,9 +76,8 @@ public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) {
     JobId jobId = recordFactory.newRecordInstance(JobId.class);
     jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid
 
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setId(id.getId());
-    appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier()));
+    ApplicationId appId = ApplicationId.newInstance(
+        toClusterTimeStamp(id.getJtIdentifier()), id.getId());
     jobId.setAppId(appId);
     return jobId;
   }
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.mapreduce;
 
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -28,18 +31,13 @@
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationResourceUsageReportPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
+import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.QueueState;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.util.Records;
 import org.junit.Test;
 import org.mockito.Mockito;
 
@@ -74,14 +72,16 @@ public void testEnums() throws Exception {
   public void testFromYarn() throws Exception {
     int appStartTime = 612354;
     YarnApplicationState state = YarnApplicationState.RUNNING;
-    ApplicationId applicationId = new ApplicationIdPBImpl();
-    ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl();
+    ApplicationId applicationId = ApplicationId.newInstance(0, 0);
+    ApplicationReport applicationReport = Records
+        .newRecord(ApplicationReport.class);
     applicationReport.setApplicationId(applicationId);
     applicationReport.setYarnApplicationState(state);
     applicationReport.setStartTime(appStartTime);
     applicationReport.setUser("TestTypeConverter-user");
-    ApplicationResourceUsageReportPBImpl appUsageRpt = new ApplicationResourceUsageReportPBImpl();
-    ResourcePBImpl r = new ResourcePBImpl();
+    ApplicationResourceUsageReport appUsageRpt = Records
+        .newRecord(ApplicationResourceUsageReport.class);
+    Resource r = Records.newRecord(Resource.class);
     r.setMemory(2048);
     appUsageRpt.setNeededResources(r);
     appUsageRpt.setNumReservedContainers(1);
@@ -107,8 +107,9 @@ public void testFromYarnApplicationReport() {
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";
-    ApplicationResourceUsageReportPBImpl appUsageRpt = new ApplicationResourceUsageReportPBImpl();
-    ResourcePBImpl r = new ResourcePBImpl();
+    ApplicationResourceUsageReport appUsageRpt = Records
+        .newRecord(ApplicationResourceUsageReport.class);
+    Resource r = Records.newRecord(Resource.class);
     r.setMemory(2048);
     appUsageRpt.setNeededResources(r);
     appUsageRpt.setNumReservedContainers(1);
@@ -134,7 +135,8 @@ public void testFromYarnApplicationReport() {
 
   @Test
   public void testFromYarnQueueInfo() {
-    org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = new QueueInfoPBImpl();
+    org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = Records
+        .newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class);
     queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED);
     org.apache.hadoop.mapreduce.QueueInfo returned =
         TypeConverter.fromYarn(queueInfo, new Configuration());
@@ -81,7 +81,7 @@ private static void delete(File dir) throws IOException {
   @Test (timeout = 120000)
   public void testJobIDtoString() {
     JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
-    jid.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
+    jid.setAppId(ApplicationId.newInstance(0, 0));
     assertEquals("job_0_0000", MRApps.toString(jid));
   }
 
@@ -103,7 +103,7 @@ public void testJobIDShort() {
   public void testTaskIDtoString() {
     TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
     tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
-    tid.getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
+    tid.getJobId().setAppId(ApplicationId.newInstance(0, 0));
     tid.setTaskType(TaskType.MAP);
     TaskType type = tid.getTaskType();
     System.err.println(type);
@@ -145,7 +145,7 @@ public void testTaskAttemptIDtoString() {
     taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class));
     taid.getTaskId().setTaskType(TaskType.MAP);
     taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
-    taid.getTaskId().getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
+    taid.getTaskId().getJobId().setAppId(ApplicationId.newInstance(0, 0));
     assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid));
   }
 
@@ -21,7 +21,6 @@
 import org.junit.Test;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.Task.CombineOutputCollector;
@@ -30,7 +29,6 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.mapreduce.task.reduce.Shuffle;
-import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.MapOutputFile;
 import org.apache.hadoop.mapred.JobConf;
@@ -40,7 +38,6 @@
 import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
 import org.apache.hadoop.mapred.ShuffleConsumerPlugin;
 import org.apache.hadoop.mapred.RawKeyValueIterator;
-import org.apache.hadoop.mapred.Reducer;
 
 /**
  * A JUnit for testing availability and accessibility of shuffle related API.
@@ -181,10 +178,6 @@ public void testConsumerApi() {
    * AuxiliaryService(s) which are "Shuffle-Providers" (ShuffleHandler and 3rd party plugins)
    */
   public void testProviderApi() {
-
-    ApplicationId mockApplicationId = mock(ApplicationId.class);
-    mockApplicationId.setClusterTimestamp(new Long(10));
-    mockApplicationId.setId(mock(JobID.class).getId());
     LocalDirAllocator mockLocalDirAllocator = mock(LocalDirAllocator.class);
     JobConf mockJobConf = mock(JobConf.class);
     try {
@@ -74,8 +74,7 @@ public class JobHistory extends AbstractService implements HistoryContext {
   public void init(Configuration conf) throws YarnException {
     LOG.info("JobHistory Init");
     this.conf = conf;
-    this.appID = RecordFactoryProvider.getRecordFactory(conf)
-        .newRecordInstance(ApplicationId.class);
+    this.appID = ApplicationId.newInstance(0, 0);
     this.appAttemptID = RecordFactoryProvider.getRecordFactory(conf)
         .newRecordInstance(ApplicationAttemptId.class);
 
@@ -129,7 +129,7 @@ private ApplicationReport getApplicationReport(
     ApplicationResourceUsageReport appResources = Mockito
         .mock(ApplicationResourceUsageReport.class);
     Mockito.when(appReport.getApplicationId()).thenReturn(
-        Records.newRecord(ApplicationId.class));
+        ApplicationId.newInstance(0, 0));
     Mockito.when(appResources.getNeededResources()).thenReturn(
         Records.newRecord(Resource.class));
     Mockito.when(appResources.getReservedResources()).thenReturn(
@@ -140,9 +140,7 @@ public ApplicationSubmissionContext answer(InvocationOnMock invocation)
     ).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class),
         any(String.class), any(Credentials.class));
 
-    appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(System.currentTimeMillis());
-    appId.setId(1);
+    appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
     jobId = TypeConverter.fromYarn(appId);
     if (testWorkDir.exists()) {
       FileContext.getLocalFSFileContext().delete(new Path(testWorkDir.toString()), true);
@@ -79,7 +79,6 @@
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
 import org.apache.hadoop.yarn.service.AbstractService;
 import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.util.Records;
 import org.jboss.netty.bootstrap.ServerBootstrap;
 import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.Channel;
@@ -549,9 +548,8 @@ protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch,
     // $x/$user/appcache/$appId/output/$mapId
     // TODO: Once Shuffle is out of NM, this can use MR APIs to convert between App and Job
     JobID jobID = JobID.forName(jobId);
-    ApplicationId appID = Records.newRecord(ApplicationId.class);
-    appID.setClusterTimestamp(Long.parseLong(jobID.getJtIdentifier()));
-    appID.setId(jobID.getId());
+    ApplicationId appID = ApplicationId.newInstance(
+        Long.parseLong(jobID.getJtIdentifier()), jobID.getId());
     final String base =
         ContainerLocalizer.USERCACHE + "/" + user + "/"
         + ContainerLocalizer.APPCACHE + "/"
@@ -72,6 +72,8 @@ Release 2.0.5-beta - UNRELEASED
     YARN-571. Remove user from ContainerLaunchContext. (Omkar Vinit Joshi via
     vinodkv)
 
+    YARN-716. Making ApplicationID immutable. (Siddharth Seth via vinodkv)
+
   NEW FEATURES
 
     YARN-482. FS: Extend SchedulingMode to intermediate queues.
@@ -23,7 +23,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.util.Records;
 
 /**
  * <p><code>ApplicationId</code> represents the <em>globally unique</em>
@@ -40,6 +40,14 @@ public abstract class ApplicationId implements Comparable<ApplicationId> {
 
   public static final String appIdStrPrefix = "application_";
 
+  public static ApplicationId newInstance(long clusterTimestamp, int id) {
+    ApplicationId appId = Records.newRecord(ApplicationId.class);
+    appId.setClusterTimestamp(clusterTimestamp);
+    appId.setId(id);
+    appId.build();
+    return appId;
+  }
+
   /**
    * Get the short integer identifier of the <code>ApplicationId</code>
    * which is unique for all applications started by a particular instance
@@ -51,8 +59,7 @@ public abstract class ApplicationId implements Comparable<ApplicationId> {
   public abstract int getId();
 
   @Private
-  @Unstable
-  public abstract void setId(int id);
+  protected abstract void setId(int id);
 
   /**
    * Get the <em>start time</em> of the <code>ResourceManager</code> which is
@@ -62,10 +69,9 @@ public abstract class ApplicationId implements Comparable<ApplicationId> {
   public abstract long getClusterTimestamp();
 
   @Private
-  @Unstable
-  public abstract void setClusterTimestamp(long clusterTimestamp);
+  protected abstract void setClusterTimestamp(long clusterTimestamp);
 
+  protected abstract void build();
 
   static final ThreadLocal<NumberFormat> appIdFormat =
       new ThreadLocal<NumberFormat>() {
@@ -21,58 +21,49 @@
 
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
-import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder;
+
+import com.google.common.base.Preconditions;
 
 
 public class ApplicationIdPBImpl extends ApplicationId {
-  ApplicationIdProto proto = ApplicationIdProto.getDefaultInstance();
+  ApplicationIdProto proto = null;
   ApplicationIdProto.Builder builder = null;
-  boolean viaProto = false;
 
   public ApplicationIdPBImpl() {
     builder = ApplicationIdProto.newBuilder();
   }
 
   public ApplicationIdPBImpl(ApplicationIdProto proto) {
     this.proto = proto;
-    viaProto = true;
   }
 
-  public synchronized ApplicationIdProto getProto() {
-    proto = viaProto ? proto : builder.build();
-    viaProto = true;
+  public ApplicationIdProto getProto() {
     return proto;
   }
 
-  private synchronized void maybeInitBuilder() {
-    if (viaProto || builder == null) {
-      builder = ApplicationIdProto.newBuilder(proto);
-    }
-    viaProto = false;
-  }
-
   @Override
-  public synchronized int getId() {
-    ApplicationIdProtoOrBuilder p = viaProto ? proto : builder;
-    return (p.getId());
+  public int getId() {
+    Preconditions.checkNotNull(proto);
+    return proto.getId();
   }
 
   @Override
-  public synchronized void setId(int id) {
-    maybeInitBuilder();
-    builder.setId((id));
+  protected void setId(int id) {
+    builder.setId(id);
   }
 
   @Override
-  public synchronized long getClusterTimestamp() {
-    ApplicationIdProtoOrBuilder p = viaProto ? proto : builder;
-    return (p.getClusterTimestamp());
+  public long getClusterTimestamp() {
+    Preconditions.checkNotNull(proto);
+    return proto.getClusterTimestamp();
   }
 
   @Override
-  public synchronized void setClusterTimestamp(long clusterTimestamp) {
-    maybeInitBuilder();
+  protected void setClusterTimestamp(long clusterTimestamp) {
     builder.setClusterTimestamp((clusterTimestamp));
   }
 
+  @Override
+  protected void build() {
+    proto = builder.build();
+  }
 }
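The PB implementation above captures the new lifecycle: writes go to the protobuf builder, a single protected build() freezes them into the proto, and the getters read only from the built proto (guarded by Preconditions.checkNotNull). A hedged caller-side sketch of what this buys, assuming only the API shown in this patch (the numeric values are placeholders):

    // The static factory is now the only public way to obtain a fully-built id.
    ApplicationId appId = ApplicationId.newInstance(1368000000000L, 42);
    long ts = appId.getClusterTimestamp();  // reads the built proto
    int seq = appId.getId();
    // appId.setId(7);   // no longer compiles: setId is protected after YARN-716
    // appId.build();    // likewise protected; only newInstance() invokes it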
@@ -43,7 +43,6 @@
 import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
-import org.apache.hadoop.yarn.util.Records;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -90,9 +89,8 @@ public void testSubmitApplication() {
     for (int i = 0; i < exitStates.length; ++i) {
       ApplicationSubmissionContext context =
           mock(ApplicationSubmissionContext.class);
-      ApplicationId applicationId = Records.newRecord(ApplicationId.class);
-      applicationId.setClusterTimestamp(System.currentTimeMillis());
-      applicationId.setId(i);
+      ApplicationId applicationId = ApplicationId.newInstance(
+          System.currentTimeMillis(), i);
       when(context.getApplicationId()).thenReturn(applicationId);
       ((MockYarnClient) client).setYarnApplicationState(exitStates[i]);
       try {
@@ -24,7 +24,6 @@
 import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
 import static org.apache.hadoop.yarn.util.StringHelper.*;
 
@@ -45,10 +44,8 @@ public static ApplicationId toAppID(String prefix, String s, Iterator<String> it
       throwParseException(sjoin(prefix, ID), s);
     }
     shouldHaveNext(prefix, s, it);
-    ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(Long.parseLong(it.next()));
-    shouldHaveNext(prefix, s, it);
-    appId.setId(Integer.parseInt(it.next()));
+    ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+        Integer.parseInt(it.next()));
     return appId;
   }
 
|
|||||||
|
|
||||||
public static ApplicationId newApplicationId(RecordFactory recordFactory,
|
public static ApplicationId newApplicationId(RecordFactory recordFactory,
|
||||||
long clustertimestamp, CharSequence id) {
|
long clustertimestamp, CharSequence id) {
|
||||||
ApplicationId applicationId =
|
return ApplicationId.newInstance(clustertimestamp,
|
||||||
recordFactory.newRecordInstance(ApplicationId.class);
|
Integer.valueOf(id.toString()));
|
||||||
applicationId.setId(Integer.valueOf(id.toString()));
|
|
||||||
applicationId.setClusterTimestamp(clustertimestamp);
|
|
||||||
return applicationId;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static ApplicationId newApplicationId(RecordFactory recordFactory,
|
public static ApplicationId newApplicationId(RecordFactory recordFactory,
|
||||||
long clusterTimeStamp, int id) {
|
long clusterTimeStamp, int id) {
|
||||||
ApplicationId applicationId =
|
return ApplicationId.newInstance(clusterTimeStamp, id);
|
||||||
recordFactory.newRecordInstance(ApplicationId.class);
|
|
||||||
applicationId.setId(id);
|
|
||||||
applicationId.setClusterTimestamp(clusterTimeStamp);
|
|
||||||
return applicationId;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static ApplicationId newApplicationId(long clusterTimeStamp, int id) {
|
public static ApplicationId newApplicationId(long clusterTimeStamp, int id) {
|
||||||
ApplicationId applicationId =
|
return ApplicationId.newInstance(clusterTimeStamp, id);
|
||||||
recordFactory.newRecordInstance(ApplicationId.class);
|
|
||||||
applicationId.setId(id);
|
|
||||||
applicationId.setClusterTimestamp(clusterTimeStamp);
|
|
||||||
return applicationId;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static ApplicationAttemptId newApplicationAttemptId(
|
public static ApplicationAttemptId newApplicationAttemptId(
|
||||||
@ -166,11 +155,8 @@ public static ApplicationAttemptId newApplicationAttemptId(
|
|||||||
}
|
}
|
||||||
|
|
||||||
public static ApplicationId convert(long clustertimestamp, CharSequence id) {
|
public static ApplicationId convert(long clustertimestamp, CharSequence id) {
|
||||||
ApplicationId applicationId =
|
return ApplicationId.newInstance(clustertimestamp,
|
||||||
recordFactory.newRecordInstance(ApplicationId.class);
|
Integer.valueOf(id.toString()));
|
||||||
applicationId.setId(Integer.valueOf(id.toString()));
|
|
||||||
applicationId.setClusterTimestamp(clustertimestamp);
|
|
||||||
return applicationId;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static ContainerId newContainerId(ApplicationAttemptId appAttemptId,
|
public static ContainerId newContainerId(ApplicationAttemptId appAttemptId,
|
||||||
|
@@ -114,18 +114,15 @@ public static ApplicationId toApplicationId(RecordFactory recordFactory,
 
   private static ApplicationId toApplicationId(RecordFactory recordFactory,
       Iterator<String> it) {
-    ApplicationId appId =
-        recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(Long.parseLong(it.next()));
-    appId.setId(Integer.parseInt(it.next()));
+    ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+        Integer.parseInt(it.next()));
     return appId;
   }
 
   private static ApplicationAttemptId toApplicationAttemptId(
       Iterator<String> it) throws NumberFormatException {
-    ApplicationId appId = Records.newRecord(ApplicationId.class);
-    appId.setClusterTimestamp(Long.parseLong(it.next()));
-    appId.setId(Integer.parseInt(it.next()));
+    ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+        Integer.parseInt(it.next()));
     ApplicationAttemptId appAttemptId = Records
         .newRecord(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@@ -135,9 +132,8 @@ private static ApplicationAttemptId toApplicationAttemptId(
 
   private static ApplicationId toApplicationId(
       Iterator<String> it) throws NumberFormatException {
-    ApplicationId appId = Records.newRecord(ApplicationId.class);
-    appId.setClusterTimestamp(Long.parseLong(it.next()));
-    appId.setId(Integer.parseInt(it.next()));
+    ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+        Integer.parseInt(it.next()));
     return appId;
   }
 
@@ -62,10 +62,7 @@ public static String newQueue() {
   }
 
   public static ApplicationId newAppID(int i) {
-    ApplicationId id = Records.newRecord(ApplicationId.class);
-    id.setClusterTimestamp(TS);
-    id.setId(i);
-    return id;
+    return ApplicationId.newInstance(TS, i);
   }
 
   public static ApplicationAttemptId newAppAttemptID(ApplicationId appId, int i) {
@@ -91,12 +91,9 @@ private void testRPCTimeout(String rpcClass) throws Exception {
         .newRecordInstance(ContainerLaunchContext.class);
     ContainerId containerId = recordFactory
         .newRecordInstance(ContainerId.class);
-    ApplicationId applicationId = recordFactory
-        .newRecordInstance(ApplicationId.class);
+    ApplicationId applicationId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId applicationAttemptId = recordFactory
         .newRecordInstance(ApplicationAttemptId.class);
-    applicationId.setClusterTimestamp(0);
-    applicationId.setId(0);
     applicationAttemptId.setApplicationId(applicationId);
     applicationAttemptId.setAttemptId(0);
     containerId.setApplicationAttemptId(applicationAttemptId);
@@ -113,12 +113,9 @@ private void test(String rpcClass) throws Exception {
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
     ContainerId containerId =
         recordFactory.newRecordInstance(ContainerId.class);
-    ApplicationId applicationId =
-        recordFactory.newRecordInstance(ApplicationId.class);
+    ApplicationId applicationId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId applicationAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
-    applicationId.setClusterTimestamp(0);
-    applicationId.setId(0);
     applicationAttemptId.setApplicationId(applicationId);
     applicationAttemptId.setAttemptId(0);
     containerId.setApplicationAttemptId(applicationAttemptId);
@@ -126,10 +126,7 @@ public long getRMIdentifier() {
     ContainerLaunchContext launchContext =
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
     ContainerId cID = recordFactory.newRecordInstance(ContainerId.class);
-    ApplicationId applicationId =
-        recordFactory.newRecordInstance(ApplicationId.class);
-    applicationId.setClusterTimestamp(0);
-    applicationId.setId(0);
+    ApplicationId applicationId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId applicationAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
     applicationAttemptId.setApplicationId(applicationId);
@@ -249,9 +249,7 @@ private void createFiles(String dir, String subDir, int numOfFiles) {
   }
 
   private ContainerId createContainerId() {
-    ApplicationId appId = Records.newRecord(ApplicationId.class);
-    appId.setClusterTimestamp(0);
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         Records.newRecord(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@@ -220,9 +220,8 @@ public ContainerManager run() {
   }
 
   public static ContainerId createContainerId() {
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(0);
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@@ -151,8 +151,6 @@ public RegisterNodeManagerResponse registerNodeManager(
       return response;
     }
 
-    ApplicationId applicationID = recordFactory
-        .newRecordInstance(ApplicationId.class);
     ApplicationAttemptId appAttemptID = recordFactory
         .newRecordInstance(ApplicationAttemptId.class);
     ContainerId firstContainerID = recordFactory
@@ -191,12 +189,15 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
           getAppToContainerStatusMap(nodeStatus.getContainersStatuses());
       org.apache.hadoop.yarn.api.records.Container mockContainer =
           mock(org.apache.hadoop.yarn.api.records.Container.class);
+
+      ApplicationId appId1 = ApplicationId.newInstance(0, 1);
+      ApplicationId appId2 = ApplicationId.newInstance(0, 2);
 
       if (heartBeatID == 1) {
         Assert.assertEquals(0, nodeStatus.getContainersStatuses().size());
 
         // Give a container to the NM.
-        applicationID.setId(heartBeatID);
-        appAttemptID.setApplicationId(applicationID);
+        appAttemptID.setApplicationId(appId1);
         firstContainerID.setApplicationAttemptId(appAttemptID);
         firstContainerID.setId(heartBeatID);
         ContainerLaunchContext launchContext = recordFactory
@@ -213,7 +214,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
         Assert.assertEquals("Number of applications should only be one!", 1,
             nodeStatus.getContainersStatuses().size());
         Assert.assertEquals("Number of container for the app should be one!",
-            1, appToContainers.get(applicationID).size());
+            1, appToContainers.get(appId1).size());
 
         // Checks on the NM end
         ConcurrentMap<ContainerId, Container> activeContainers =
@@ -221,8 +222,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
         Assert.assertEquals(1, activeContainers.size());
 
         // Give another container to the NM.
-        applicationID.setId(heartBeatID);
-        appAttemptID.setApplicationId(applicationID);
+        appAttemptID.setApplicationId(appId2);
         secondContainerID.setApplicationAttemptId(appAttemptID);
         secondContainerID.setId(heartBeatID);
         ContainerLaunchContext launchContext = recordFactory
@@ -239,7 +239,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
         Assert.assertEquals("Number of applications should only be one!", 1,
             appToContainers.size());
         Assert.assertEquals("Number of container for the app should be two!",
-            2, appToContainers.get(applicationID).size());
+            2, appToContainers.get(appId2).size());
 
         // Checks on the NM end
         ConcurrentMap<ContainerId, Container> activeContainers =
@@ -18,8 +18,12 @@
 
 package org.apache.hadoop.yarn.server.nodemanager.containermanager;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.apache.hadoop.yarn.service.Service.STATE.INITED;
+import static org.apache.hadoop.yarn.service.Service.STATE.STARTED;
+import static org.apache.hadoop.yarn.service.Service.STATE.STOPPED;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -30,17 +34,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType;
 import org.apache.hadoop.yarn.service.AbstractService;
 import org.apache.hadoop.yarn.service.Service;
+import org.junit.Test;
 
-import static org.apache.hadoop.yarn.service.Service.STATE.*;
-
 public class TestAuxServices {
   private static final Log LOG = LogFactory.getLog(TestAuxServices.class);
@@ -123,18 +120,17 @@ public void testAuxEventDispatch() {
     aux.init(conf);
     aux.start();
 
-    ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
-    appId.setId(65);
+    ApplicationId appId1 = ApplicationId.newInstance(0, 65);
     ByteBuffer buf = ByteBuffer.allocate(6);
     buf.putChar('A');
     buf.putInt(65);
     buf.flip();
     AuxServicesEvent event = new AuxServicesEvent(
-        AuxServicesEventType.APPLICATION_INIT, "user0", appId, "Asrv", buf);
+        AuxServicesEventType.APPLICATION_INIT, "user0", appId1, "Asrv", buf);
     aux.handle(event);
-    appId.setId(66);
+    ApplicationId appId2 = ApplicationId.newInstance(0, 66);
     event = new AuxServicesEvent(
-        AuxServicesEventType.APPLICATION_STOP, "user0", appId, "Bsrv", null);
+        AuxServicesEventType.APPLICATION_STOP, "user0", appId2, "Bsrv", null);
     // verify all services got the stop event
     aux.handle(event);
     Collection<AuxServices.AuxiliaryService> servs = aux.getServices();
@@ -78,9 +78,7 @@ public TestContainerManager() throws UnsupportedFileSystemException {
   }
 
   private ContainerId createContainerId() {
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(0);
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@@ -165,9 +165,7 @@ public void testContainerEnvVariables() throws Exception {
 
     Container mockContainer = mock(Container.class);
     // ////// Construct the Container-id
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(0);
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@@ -339,9 +337,7 @@ public void testDelayedKill() throws Exception {
 
     Container mockContainer = mock(Container.class);
     // ////// Construct the Container-id
-    ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(1);
-    appId.setId(1);
+    ApplicationId appId = ApplicationId.newInstance(1, 1);
     ApplicationAttemptId appAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@@ -709,10 +709,7 @@ public void testLogAggregationForRealContainerLaunch() throws IOException,
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
     Container mockContainer = mock(Container.class);
     // ////// Construct the Container-id
-    ApplicationId appId =
-        recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(0);
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         BuilderUtils.newApplicationAttemptId(appId, 1);
     ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0);
@@ -204,10 +204,7 @@ public void testContainerKillOnMemoryOverflow() throws IOException,
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
     Container mockContainer = mock(Container.class);
     // ////// Construct the Container-id
-    ApplicationId appId =
-        recordFactory.newRecordInstance(ApplicationId.class);
-    appId.setClusterTimestamp(0);
-    appId.setId(0);
+    ApplicationId appId = ApplicationId.newInstance(0, 0);
     ApplicationAttemptId appAttemptId =
         recordFactory.newRecordInstance(ApplicationAttemptId.class);
     appAttemptId.setApplicationId(appId);
@ -157,8 +157,7 @@ public void testGetApplicationReport() throws YarnRemoteException {
     RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
     GetApplicationReportRequest request = recordFactory
         .newRecordInstance(GetApplicationReportRequest.class);
-    request.setApplicationId(recordFactory
-        .newRecordInstance(ApplicationId.class));
+    request.setApplicationId(ApplicationId.newInstance(0, 0));
     GetApplicationReportResponse applicationReport = rmService
         .getApplicationReport(request);
     Assert.assertNull("It should return null as application report for absent application.",
@ -436,11 +435,7 @@ private ConcurrentHashMap<ApplicationId, RMApp> getRMApps(
   }

   private ApplicationId getApplicationId(int id) {
-    ApplicationId applicationId = recordFactory
-        .newRecordInstance(ApplicationId.class);
-    applicationId.setClusterTimestamp(123456);
-    applicationId.setId(id);
-    return applicationId;
+    return ApplicationId.newInstance(123456, id);
   }

   private RMAppImpl getRMApp(RMContext rmContext, YarnScheduler yarnScheduler,
@ -20,6 +20,7 @@

 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.when;

 import java.io.IOException;
 import java.util.Comparator;
@ -30,6 +31,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.net.NetworkTopology;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.api.records.QueueInfo;
 import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
@ -51,13 +53,10 @@
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
 import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
-import static org.mockito.Mockito.*;


 public class TestCapacityScheduler {
@ -468,15 +467,9 @@ public void testApplicationComparator()
   {
     CapacityScheduler cs = new CapacityScheduler();
     Comparator<FiCaSchedulerApp> appComparator= cs.getApplicationComparator();
-    ApplicationId id1 = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
-    id1.setClusterTimestamp(1);
-    id1.setId(1);
-    ApplicationId id2 = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
-    id2.setClusterTimestamp(1);
-    id2.setId(2);
-    ApplicationId id3 = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
-    id3.setClusterTimestamp(2);
-    id3.setId(1);
+    ApplicationId id1 = ApplicationId.newInstance(1, 1);
+    ApplicationId id2 = ApplicationId.newInstance(1, 2);
+    ApplicationId id3 = ApplicationId.newInstance(2, 1);
     //same clusterId
     FiCaSchedulerApp app1 = Mockito.mock(FiCaSchedulerApp.class);
     when(app1.getApplicationId()).thenReturn(id1);
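The comparator test above builds three ids that differ first in the id field and then in the cluster timestamp. A small sketch of how such ids can be compared directly (illustrative only; it assumes ApplicationId orders by cluster timestamp and then by id, which is how the three values above are arranged):

import org.apache.hadoop.yarn.api.records.ApplicationId;

public class ApplicationIdOrderingSketch {
  public static void main(String[] args) {
    ApplicationId id1 = ApplicationId.newInstance(1, 1);
    ApplicationId id2 = ApplicationId.newInstance(1, 2); // same cluster timestamp, larger id
    ApplicationId id3 = ApplicationId.newInstance(2, 1); // later cluster timestamp

    // ApplicationId is Comparable; the expected ordering is by cluster timestamp, then id.
    System.out.println(id1.compareTo(id2) < 0); // expected: true
    System.out.println(id1.compareTo(id3) < 0); // expected: true
  }
}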
@ -36,8 +36,7 @@ public class TestFSSchedulerApp {

   private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) {
     ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class);
-    ApplicationId appIdImpl = recordFactory.newRecordInstance(ApplicationId.class);
-    appIdImpl.setId(appId);
+    ApplicationId appIdImpl = ApplicationId.newInstance(0, appId);
     attId.setAttemptId(attemptId);
     attId.setApplicationId(appIdImpl);
     return attId;
@ -143,8 +143,7 @@ private Configuration createConfiguration() {

   private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) {
     ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class);
-    ApplicationId appIdImpl = recordFactory.newRecordInstance(ApplicationId.class);
-    appIdImpl.setId(appId);
+    ApplicationId appIdImpl = ApplicationId.newInstance(0, appId);
     attId.setAttemptId(attemptId);
     attId.setApplicationId(appIdImpl);
     return attId;
@ -97,9 +97,7 @@ public void tearDown() throws Exception {
   private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) {
     ApplicationAttemptId attId = recordFactory
         .newRecordInstance(ApplicationAttemptId.class);
-    ApplicationId appIdImpl = recordFactory
-        .newRecordInstance(ApplicationId.class);
-    appIdImpl.setId(appId);
+    ApplicationId appIdImpl = ApplicationId.newInstance(0, appId);
     attId.setAttemptId(attemptId);
     attId.setApplicationId(appIdImpl);
     return attId;
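The three createAppAttemptId helpers in the scheduler tests share the same shape after this change: only ApplicationId becomes immutable here, while ApplicationAttemptId is still built through the record factory and its setters. A consolidated sketch under that assumption (class name and import paths inferred from the surrounding diff, not part of the patch):

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class AppAttemptIdSketch {
  private static final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);

  // Mirrors the scheduler-test helpers: immutable ApplicationId, mutable attempt-id record.
  static ApplicationAttemptId createAppAttemptId(int appId, int attemptId) {
    ApplicationAttemptId attId =
        recordFactory.newRecordInstance(ApplicationAttemptId.class);
    attId.setApplicationId(ApplicationId.newInstance(0, appId));
    attId.setAttemptId(attemptId);
    return attId;
  }
}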