MAPREDUCE-3527. Fix minor API incompatibilities between 1.0 and 0.23.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1213350 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 2011-12-12 18:29:20 +00:00
parent 0f70398292
commit 6571d39a74
8 changed files with 58 additions and 0 deletions

Changed file (path not shown):

@@ -261,6 +261,9 @@ Release 0.23.1 - Unreleased
 MAPREDUCE-3519. Fixed a deadlock in NodeManager LocalDirectories's handling
 service. (Ravi Gummadi via vinodkv)
+MAPREDUCE-3527. Fix minor API incompatibilities between 1.0 and 0.23.
+(tomwhite)
 Release 0.23.0 - 2011-11-01
 INCOMPATIBLE CHANGES

Changed file (path not shown):

@@ -42,4 +42,12 @@ public InvalidJobConfException(String msg) {
   super(msg);
 }
+public InvalidJobConfException(String msg, Throwable t) {
+  super(msg, t);
+}
+public InvalidJobConfException(Throwable t) {
+  super(t);
+}
 }
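
A minimal sketch of how client code might use the restored cause-chaining constructors; the class, method, and message below are illustrative, and the org.apache.hadoop.mapred package for InvalidJobConfException is assumed rather than shown in this excerpt:

    import java.io.IOException;
    import org.apache.hadoop.mapred.InvalidJobConfException;

    public class ConfCheck {
      // Hypothetical wrapper: surface a lower-level failure as a job-configuration
      // error, keeping the original cause attached via the restored constructor.
      static void rethrowAsConfError(IOException cause) throws InvalidJobConfException {
        throw new InvalidJobConfException("job configuration could not be validated", cause);
      }
    }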

Changed file (path not shown):

@@ -420,6 +420,16 @@ public boolean isRetired() throws IOException {
 boolean monitorAndPrintJob() throws IOException, InterruptedException {
   return job.monitorAndPrintJob();
 }
+@Override
+public String getFailureInfo() throws IOException {
+  try {
+    return job.getStatus().getFailureInfo();
+  } catch (InterruptedException ie) {
+    throw new IOException(ie);
+  }
+}
 }
 Cluster cluster;

Changed file (path not shown):

@@ -223,4 +223,11 @@ public TaskCompletionEvent[] getTaskCompletionEvents(int startFrom)
  * @throws IOException
  */
 public boolean isRetired() throws IOException;
+/**
+ * Get failure info for the job.
+ * @return the failure info for the job.
+ * @throws IOException
+ */
+public String getFailureInfo() throws IOException;
 }
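
A sketch of how an old-API client could read the new diagnostic after a failed run; the submission path and output handling are illustrative, not part of the patch:

    import java.io.IOException;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    public class FailureInfoExample {
      public static void main(String[] args) throws IOException {
        JobConf conf = new JobConf();               // illustrative, unconfigured job
        JobClient client = new JobClient(conf);
        RunningJob running = client.submitJob(conf);
        running.waitForCompletion();
        if (!running.isSuccessful()) {
          // Restored accessor: a human-readable reason for the failure.
          System.err.println("Job failed: " + running.getFailureInfo());
        }
      }
    }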

Changed file (path not shown):

@@ -23,7 +23,10 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
@@ -116,4 +119,17 @@ public org.apache.hadoop.mapreduce.RecordReader<K, V> createRecordReader(
   return null;
 }
+/** List input directories.
+ * Subclasses may override to, e.g., select only files matching a regular
+ * expression.
+ *
+ * @param job the job to list input paths for
+ * @return array of FileStatus objects
+ * @throws IOException if zero items.
+ */
+protected FileStatus[] listStatus(JobConf job) throws IOException {
+  List<FileStatus> result = super.listStatus(new Job(job));
+  return result.toArray(new FileStatus[result.size()]);
+}
 }
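
The restored javadoc points at overriding listStatus to filter inputs. A standalone sketch of that pattern against the old-API TextInputFormat follows; the subclass name and regular expression are illustrative, and the class patched in this hunk is not named in the excerpt:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.TextInputFormat;

    // Keep only inputs whose file names match a regular expression.
    public class RegexTextInputFormat extends TextInputFormat {
      @Override
      protected FileStatus[] listStatus(JobConf job) throws IOException {
        List<FileStatus> kept = new ArrayList<FileStatus>();
        for (FileStatus stat : super.listStatus(job)) {
          if (stat.getPath().getName().matches("part-.*")) {  // illustrative pattern
            kept.add(stat);
          }
        }
        return kept.toArray(new FileStatus[kept.size()]);
      }
    }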

Changed file (path not shown):

@@ -91,6 +91,12 @@ public TaskID getTaskID() {
   return taskId;
 }
+/**Returns whether this TaskID is a map ID */
+@Deprecated
+public boolean isMap() {
+  return taskId.isMap();
+}
 /**Returns the TaskType of the TaskAttemptID */
 public TaskType getTaskType() {
   return taskId.getTaskType();

Changed file (path not shown):

@@ -101,6 +101,12 @@ public JobID getJobID() {
   return jobId;
 }
+/**Returns whether this TaskID is a map ID */
+@Deprecated
+public boolean isMap() {
+  return type == TaskType.MAP;
+}
 /**
  * Get the type of the task
  */
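
The two isMap() methods restored above come back deprecated; a sketch of the 1.0-style call next to the 0.23-preferred check. The attempt ID string is illustrative, and the new-API org.apache.hadoop.mapreduce IDs are assumed to be the classes patched here, since file names are not shown:

    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class IsMapExample {
      public static void main(String[] args) {
        TaskAttemptID attempt = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
        boolean viaDeprecated = attempt.isMap();                      // 1.0-style, now deprecated
        boolean viaTaskType = attempt.getTaskType() == TaskType.MAP;  // preferred on 0.23
        System.out.println(viaDeprecated + " == " + viaTaskType);
      }
    }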

Changed file (path not shown):

@@ -50,6 +50,8 @@ public class KeyValueLineRecordReader extends RecordReader<Text, Text> {
 private Text value;
+public Class getKeyClass() { return Text.class; }
 public KeyValueLineRecordReader(Configuration conf)
   throws IOException {
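
A minimal sketch of calling the restored accessor; the new-API org.apache.hadoop.mapreduce.lib.input package is assumed, since the file path is not shown in this excerpt:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;

    public class KeyClassExample {
      public static void main(String[] args) throws IOException {
        KeyValueLineRecordReader reader = new KeyValueLineRecordReader(new Configuration());
        // Restored accessor from the 1.0 reader: always reports Text as the key class.
        System.out.println(reader.getKeyClass());
      }
    }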