MAPREDUCE-4117. mapred job -status throws NullPointerException (Devaraj K via bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1311479 13f79535-47bb-0310-9956-ffa450edef68
Robert Joseph Evans 2012-04-09 21:52:50 +00:00
parent 706394d039
commit 48ab08f1c6
3 changed files with 61 additions and 0 deletions

@@ -280,6 +280,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-4051. Remove the empty hadoop-mapreduce-project/assembly/all.xml
     file (Ravi Prakash via bobby)
 
+    MAPREDUCE-4117. mapred job -status throws NullPointerException (Devaraj K
+    via bobby)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

@@ -509,6 +509,11 @@ public TaskCompletionEvent[] run() throws IOException,
         lastEvent = event;
       }
     }
+    if (lastEvent == null) {
+      return "There are no failed tasks for the job. "
+          + "Job is failed due to some other reason and reason "
+          + "can be found in the logs.";
+    }
     String[] taskAttemptID = lastEvent.getTaskAttemptId().toString().split("_", 2);
     String taskID = taskAttemptID[1].substring(0, taskAttemptID[1].length()-2);
     return (" task " + taskID + " failed " +

@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.JobStatus.State;
+import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestJob {
+  @Test
+  public void testJobToString() throws IOException, InterruptedException {
+    Cluster cluster = mock(Cluster.class);
+    ClientProtocol client = mock(ClientProtocol.class);
+    when(cluster.getClient()).thenReturn(client);
+    JobID jobid = new JobID("1014873536921", 6);
+    JobStatus status = new JobStatus(jobid, 0.0f, 0.0f, 0.0f, 0.0f,
+        State.FAILED, JobPriority.NORMAL, "root", "TestJobToString",
+        "job file", "tracking url");
+    when(client.getJobStatus(jobid)).thenReturn(status);
+    when(client.getTaskReports(jobid, TaskType.MAP)).thenReturn(
+        new TaskReport[0]);
+    when(client.getTaskReports(jobid, TaskType.REDUCE)).thenReturn(
+        new TaskReport[0]);
+    when(client.getTaskCompletionEvents(jobid, 0, 10)).thenReturn(
+        new TaskCompletionEvent[0]);
+    Job job = Job.getInstance(cluster, status, new JobConf());
+    Assert.assertNotNull(job.toString());
+  }
+}
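
To run only this new test, Maven's standard Surefire filter, mvn test -Dtest=TestJob, should suffice from whichever mapreduce client module contains org.apache.hadoop.mapreduce.TestJob (the module path is not shown in this diff).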