MAPREDUCE-4419. ./mapred queue -info <queuename> -showJobs displays all the jobs irrespective of <queuename> (Devaraj K via bobby)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1361389 13f79535-47bb-0310-9956-ffa450edef68
parent 9dcd317d9b
commit 228736ab51
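For context, a minimal sketch (not part of this commit) of the client-side path the fix touches: after this change, the jobs printed for a queue come from that queue's own JobQueueInfo rather than from the full job list. The JobClient.getQueueInfo(String) and JobQueueInfo.getJobStatuses() calls below mirror what the patched queue CLI does; the class name and command-line handling are illustrative only, not code from this commit.

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobQueueInfo;
import org.apache.hadoop.mapreduce.JobStatus;

public class QueueJobsExample {
  public static void main(String[] args) throws Exception {
    // Connect to the cluster described by the local Hadoop configuration.
    JobClient jc = new JobClient(new JobConf());
    // Fetch the metadata of the queue named on the command line.
    JobQueueInfo queueInfo = jc.getQueueInfo(args[0]);
    // With MAPREDUCE-4419, the returned JobQueueInfo carries only the
    // statuses of jobs submitted to this queue.
    JobStatus[] jobs = queueInfo.getJobStatuses();
    if (jobs == null) {
      jobs = new JobStatus[0];
    }
    for (JobStatus job : jobs) {
      System.out.println(job.getJobID() + "\t" + job.getQueue());
    }
  }
}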
@@ -682,6 +682,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-3940. ContainerTokens should have an expiry interval. (Siddharth
     Seth and Vinod Kumar Vavilapalli via vinodkv)
 
+    MAPREDUCE-4419. ./mapred queue -info <queuename> -showJobs displays all
+    the jobs irrespective of <queuename> (Devaraj K via bobby)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -25,6 +25,7 @@
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -184,7 +185,7 @@ private void displayQueueInfo(String queue, boolean showJobs)
     printJobQueueInfo(jobQueueInfo, new PrintWriter(System.out));
     if (showJobs && (jobQueueInfo.getChildren() == null ||
         jobQueueInfo.getChildren().size() == 0)) {
-      JobStatus[] jobs = jc.getJobsFromQueue(queue);
+      JobStatus[] jobs = jobQueueInfo.getJobStatuses();
       if (jobs == null)
         jobs = new JobStatus[0];
       jc.displayJobList(jobs);
@@ -238,7 +238,7 @@ public static JobStatus downgrade(org.apache.hadoop.mapreduce.JobStatus stat){
         stat.getSetupProgress(), stat.getMapProgress(), stat.getReduceProgress(),
         stat.getCleanupProgress(), stat.getState().getValue(),
         JobPriority.valueOf(stat.getPriority().name()),
-        stat.getUsername(), stat.getJobName(), stat.getJobFile(),
+        stat.getUsername(), stat.getJobName(), stat.getQueue(), stat.getJobFile(),
         stat.getTrackingUrl(), stat.isUber());
     old.setStartTime(stat.getStartTime());
     old.setFinishTime(stat.getFinishTime());
@@ -389,9 +389,11 @@ public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request)
         appReports = new ArrayList<ApplicationReport>(
             apps.size());
         for (RMApp app : apps) {
+          if (app.getQueue().equals(queueInfo.getQueueName())) {
           appReports.add(app.createAndGetApplicationReport(true));
+          }
         }
       }
       queueInfo.setApplications(appReports);
       response.setQueueInfo(queueInfo);
     } catch (IOException ioe) {
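The server-side half of the fix is the queue check added above in ClientRMService.getQueueInfo: an application report is included only when the application's queue matches the queue being described. Below is a minimal, self-contained sketch of that filtering pattern, with a hypothetical App class standing in for RMApp (illustrative only, not ResourceManager code).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class QueueFilterSketch {
  // Hypothetical stand-in for RMApp: just enough state to show the check.
  static final class App {
    private final String id;
    private final String queue;
    App(String id, String queue) { this.id = id; this.queue = queue; }
    String getQueue() { return queue; }
  }

  // Keep only the applications submitted to the requested queue, mirroring
  // the if (app.getQueue().equals(queueInfo.getQueueName())) check above.
  static List<App> appsForQueue(List<App> apps, String queueName) {
    List<App> reports = new ArrayList<App>(apps.size());
    for (App app : apps) {
      if (app.getQueue().equals(queueName)) {
        reports.add(app);
      }
    }
    return reports;
  }

  public static void main(String[] args) {
    List<App> apps = Arrays.asList(
        new App("app_1", "testqueue"),
        new App("app_2", "a"),
        new App("app_3", "testqueue"));
    // Same fixture shape as the new TestClientRMService#testGetQueueInfo:
    // two of the three applications live in "testqueue", so two are kept.
    for (App app : appsForQueue(apps, "testqueue")) {
      System.out.println(app.id); // prints app_1 and app_3
    }
  }
}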
@@ -20,7 +20,10 @@
 
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyString;
 
+import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
@@ -34,13 +37,21 @@
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.NodeReport;
+import org.apache.hadoop.yarn.api.records.QueueInfo;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
 import org.apache.hadoop.yarn.util.Records;
 import org.junit.Test;
 
@@ -49,6 +60,9 @@ public class TestClientRMService {
 
   private static final Log LOG = LogFactory.getLog(TestClientRMService.class);
 
+  private RecordFactory recordFactory = RecordFactoryProvider
+      .getRecordFactory(null);
+
   @Test
   public void testGetClusterNodes() throws Exception {
     MockRM rm = new MockRM() {
@@ -109,4 +123,66 @@ public void testGetApplicationReport() throws YarnRemoteException {
     Assert.assertNull("It should return null as application report for absent application.",
         applicationReport.getApplicationReport());
   }
+
+  @Test
+  public void testGetQueueInfo() throws Exception {
+    YarnScheduler yarnScheduler = mock(YarnScheduler.class);
+    RMContext rmContext = mock(RMContext.class);
+    mockRMContext(yarnScheduler, rmContext);
+    ClientRMService rmService = new ClientRMService(rmContext, yarnScheduler,
+        null, null, null);
+    GetQueueInfoRequest request = recordFactory
+        .newRecordInstance(GetQueueInfoRequest.class);
+    request.setQueueName("testqueue");
+    request.setIncludeApplications(true);
+    GetQueueInfoResponse queueInfo = rmService.getQueueInfo(request);
+    List<ApplicationReport> applications = queueInfo.getQueueInfo()
+        .getApplications();
+    Assert.assertEquals(2, applications.size());
+  }
+
+  private void mockRMContext(YarnScheduler yarnScheduler, RMContext rmContext)
+      throws IOException {
+    Dispatcher dispatcher = mock(Dispatcher.class);
+    when(rmContext.getDispatcher()).thenReturn(dispatcher);
+    QueueInfo queInfo = recordFactory.newRecordInstance(QueueInfo.class);
+    queInfo.setQueueName("testqueue");
+    when(yarnScheduler.getQueueInfo(anyString(), anyBoolean(), anyBoolean()))
+        .thenReturn(queInfo);
+    ConcurrentHashMap<ApplicationId, RMApp> apps = getRMApps(rmContext,
+        yarnScheduler);
+    when(rmContext.getRMApps()).thenReturn(apps);
+  }
+
+  private ConcurrentHashMap<ApplicationId, RMApp> getRMApps(
+      RMContext rmContext, YarnScheduler yarnScheduler) {
+    ConcurrentHashMap<ApplicationId, RMApp> apps =
+        new ConcurrentHashMap<ApplicationId, RMApp>();
+    ApplicationId applicationId1 = getApplicationId(1);
+    ApplicationId applicationId2 = getApplicationId(2);
+    ApplicationId applicationId3 = getApplicationId(3);
+    YarnConfiguration config = new YarnConfiguration();
+    apps.put(applicationId1, getRMApp(rmContext, yarnScheduler, applicationId1,
+        config, "testqueue"));
+    apps.put(applicationId2, getRMApp(rmContext, yarnScheduler, applicationId2,
+        config, "a"));
+    apps.put(applicationId3, getRMApp(rmContext, yarnScheduler, applicationId3,
+        config, "testqueue"));
+    return apps;
+  }
+
+  private ApplicationId getApplicationId(int id) {
+    ApplicationId applicationId = recordFactory
+        .newRecordInstance(ApplicationId.class);
+    applicationId.setClusterTimestamp(123456);
+    applicationId.setId(id);
+    return applicationId;
+  }
+
+  private RMAppImpl getRMApp(RMContext rmContext, YarnScheduler yarnScheduler,
+      ApplicationId applicationId3, YarnConfiguration config, String queueName) {
+    return new RMAppImpl(applicationId3, rmContext, config, null, null,
+        queueName, null, null, null, yarnScheduler, null, System
+            .currentTimeMillis());
+  }
 }
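The new TestClientRMService#testGetQueueInfo above mocks three RMApps, two submitted to "testqueue" and one to "a", then asks ClientRMService for "testqueue" with applications included; with the queue filter added in getQueueInfo, only the two matching application reports come back, which is what the Assert.assertEquals(2, applications.size()) line checks.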