Merge trunk into HA branch.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1213390 13f79535-47bb-0310-9956-ffa450edef68
commit 7c2a8c7ff2
@@ -261,6 +261,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3519. Fixed a deadlock in NodeManager LocalDirectories's handling
     service. (Ravi Gummadi via vinodkv)
 
+    MAPREDUCE-3527. Fix minor API incompatibilities between 1.0 and 0.23.
+    (tomwhite)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
@@ -41,5 +41,13 @@ public InvalidJobConfException() {
   public InvalidJobConfException(String msg) {
     super(msg);
   }
 
+  public InvalidJobConfException(String msg, Throwable t) {
+    super(msg, t);
+  }
+
+  public InvalidJobConfException(Throwable t) {
+    super(t);
+  }
+
 }
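Note: for illustration, a minimal sketch of how the new cause-carrying constructors might be used when validating a job configuration. The JobConfValidator class, its validateOutputPath helper, and the messages are hypothetical, not part of this change.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapred.JobConf;

public class JobConfValidator {
  // Hypothetical helper: fails submission with the underlying
  // filesystem error preserved as the cause.
  static void validateOutputPath(JobConf conf, Path out)
      throws InvalidJobConfException {
    boolean exists;
    try {
      exists = out.getFileSystem(conf).exists(out);
    } catch (IOException ioe) {
      // New in this change: chain the cause instead of flattening
      // it into the message string.
      throw new InvalidJobConfException("Could not check output path " + out, ioe);
    }
    if (exists) {
      throw new InvalidJobConfException("Output path " + out + " already exists");
    }
  }
}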
@@ -420,6 +420,16 @@ public boolean isRetired() throws IOException {
     boolean monitorAndPrintJob() throws IOException, InterruptedException {
       return job.monitorAndPrintJob();
     }
+
+    @Override
+    public String getFailureInfo() throws IOException {
+      try {
+        return job.getStatus().getFailureInfo();
+      } catch (InterruptedException ie) {
+        throw new IOException(ie);
+      }
+    }
+
   }
 
   Cluster cluster;
@@ -223,4 +223,11 @@ public TaskCompletionEvent[] getTaskCompletionEvents(int startFrom)
    * @throws IOException
    */
   public boolean isRetired() throws IOException;
+
+  /**
+   * Get failure info for the job.
+   * @return the failure info for the job.
+   * @throws IOException
+   */
+  public String getFailureInfo() throws IOException;
 }
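Note: a sketch of how a client might surface the new diagnostic; the class name is made up and the job setup is elided. getFailureInfo() is assumed to return a human-readable reason once the job has failed.

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

public class FailureInfoExample {
  public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf();
    // ... mapper, reducer, and input/output paths elided ...
    JobClient client = new JobClient(conf);
    RunningJob running = client.submitJob(conf);
    running.waitForCompletion();
    if (!running.isSuccessful()) {
      // New in this change: a textual reason for the failure.
      System.err.println("Job failed: " + running.getFailureInfo());
    }
  }
}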
@@ -23,7 +23,10 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
@@ -115,5 +118,18 @@ public org.apache.hadoop.mapreduce.RecordReader<K, V> createRecordReader(
     TaskAttemptContext context) throws IOException {
     return null;
   }
+
+  /** List input directories.
+   * Subclasses may override to, e.g., select only files matching a regular
+   * expression.
+   *
+   * @param job the job to list input paths for
+   * @return array of FileStatus objects
+   * @throws IOException if zero items.
+   */
+  protected FileStatus[] listStatus(JobConf job) throws IOException {
+    List<FileStatus> result = super.listStatus(new Job(job));
+    return result.toArray(new FileStatus[result.size()]);
+  }
 
 }
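Note: the restored listStatus(JobConf) hook lets old-API subclasses filter their input listing, as the Javadoc above suggests. A sketch, assuming the enclosing class is the old-API org.apache.hadoop.mapred.lib.CombineFileInputFormat (the file name is not shown in this view); the subclass, its pattern, and the elided record reader are hypothetical.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.CombineFileInputFormat;

// Hypothetical subclass: keeps only input files whose names match a pattern.
public class FilteringCombineInputFormat
    extends CombineFileInputFormat<LongWritable, Text> {
  private static final Pattern DATA_FILES = Pattern.compile(".*\\.data$");

  @Override
  protected FileStatus[] listStatus(JobConf job) throws IOException {
    List<FileStatus> kept = new ArrayList<FileStatus>();
    for (FileStatus stat : super.listStatus(job)) {
      if (DATA_FILES.matcher(stat.getPath().getName()).matches()) {
        kept.add(stat);
      }
    }
    return kept.toArray(new FileStatus[kept.size()]);
  }

  @Override
  public RecordReader<LongWritable, Text> getRecordReader(
      InputSplit split, JobConf job, Reporter reporter) throws IOException {
    // Record reader construction elided from this sketch.
    throw new UnsupportedOperationException("not implemented in this sketch");
  }
}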
@@ -90,6 +90,12 @@ public JobID getJobID() {
   public TaskID getTaskID() {
     return taskId;
   }
+
+  /**Returns whether this TaskID is a map ID */
+  @Deprecated
+  public boolean isMap() {
+    return taskId.isMap();
+  }
 
   /**Returns the TaskType of the TaskAttemptID */
   public TaskType getTaskType() {
@@ -100,6 +100,12 @@ public TaskID() {
   public JobID getJobID() {
     return jobId;
   }
+
+  /**Returns whether this TaskID is a map ID */
+  @Deprecated
+  public boolean isMap() {
+    return type == TaskType.MAP;
+  }
 
   /**
    * Get the type of the task
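Note: both isMap() methods come back marked @Deprecated; they exist for source compatibility with 1.0, and new code should compare task types instead. A small sketch of the two styles (the attempt ID string is the stock Javadoc example):

import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskTypeCheck {
  public static void main(String[] args) {
    TaskAttemptID attempt =
        TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");

    // Restored 1.0-style check, now deprecated:
    boolean viaIsMap = attempt.isMap();

    // Preferred 0.23-style check:
    boolean viaTaskType = attempt.getTaskType() == TaskType.MAP;

    System.out.println(viaIsMap + " " + viaTaskType); // true true
  }
}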
@@ -50,6 +50,8 @@ public class KeyValueLineRecordReader extends RecordReader<Text, Text> {
 
   private Text value;
 
+  public Class getKeyClass() { return Text.class; }
+
   public KeyValueLineRecordReader(Configuration conf)
     throws IOException {
 
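Note: getKeyClass() is another small 1.0-era accessor restored here. A minimal sketch of constructing the reader directly and querying it; the separator override is only for illustration, and the KEY_VALUE_SEPERATOR constant is assumed to be the 0.23-era name of that setting.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;

public class KeyClassExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Split each line on "," instead of the default tab.
    conf.set(KeyValueLineRecordReader.KEY_VALUE_SEPERATOR, ",");
    KeyValueLineRecordReader reader = new KeyValueLineRecordReader(conf);
    System.out.println(reader.getKeyClass()); // class org.apache.hadoop.io.Text
  }
}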