MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support binary compatibility with hadoop-1 examples. Contributed by Zhijie Shen.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1480394 13f79535-47bb-0310-9956-ffa450edef68
parent 8dee15abe3
commit 8888d3fc49
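For context, the binary-compatibility target is the hadoop-1 style of driving an aggregate job through the old mapred API. A minimal sketch of such a driver, assuming nothing beyond that API (MyAggregateDriver is an illustrative name, not part of this patch):

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

public class MyAggregateDriver {
  public static void main(String[] args) throws IOException {
    // hadoop-1 style: build the aggregate job from the command-line args only.
    JobConf job = ValueAggregatorJob.createValueAggregatorJob(args);
    // Submit and wait for completion through the old mapred client.
    JobClient.runJob(job);
  }
}

Binaries compiled against this one-argument overload must keep resolving it unchanged, which is why the patch below adds new overloads rather than altering the existing signatures.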
@@ -221,6 +221,9 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5036. Default shuffle handler port should not be 8080.
     (Sandy Ryza via tomwhite)
 
+    MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support
+    binary compatibility with hadoop-1 examples. (Zhijie Shen via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method
@@ -105,12 +105,14 @@ public static JobControl createValueAggregatorJobs(String args[]) throws IOExcep
    *
    * @param args the arguments used for job creation. Generic hadoop
    * arguments are accepted.
+   * @param caller the caller class.
    * @return a JobConf object ready for submission.
    *
    * @throws IOException
    * @see GenericOptionsParser
    */
-  public static JobConf createValueAggregatorJob(String args[])
+  @SuppressWarnings("rawtypes")
+  public static JobConf createValueAggregatorJob(String args[], Class<?> caller)
     throws IOException {
 
     Configuration conf = new Configuration();
@@ -159,7 +161,7 @@ public static JobConf createValueAggregatorJob(String args[])
     }
     String userJarFile = theJob.get("user.jar.file");
     if (userJarFile == null) {
-      theJob.setJarByClass(ValueAggregator.class);
+      theJob.setJarByClass(caller != null ? caller : ValueAggregatorJob.class);
     } else {
       theJob.setJar(userJarFile);
     }
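With the new caller parameter, the job jar is resolved from the calling class whenever no user.jar.file property is set, so an example that lives in its own jar gets that jar picked up automatically. In the sketch above, the construction call would become the following (again illustrative, not taken from this patch):

    JobConf job = ValueAggregatorJob.createValueAggregatorJob(
        args, MyAggregateDriver.class);
    JobClient.runJob(job);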
@@ -183,6 +185,21 @@ public static JobConf createValueAggregatorJob(String args[])
     return theJob;
   }
 
+  /**
+   * Create an Aggregate based map/reduce job.
+   *
+   * @param args the arguments used for job creation. Generic hadoop
+   * arguments are accepted.
+   * @return a JobConf object ready for submission.
+   *
+   * @throws IOException
+   * @see GenericOptionsParser
+   */
+  public static JobConf createValueAggregatorJob(String args[])
+    throws IOException {
+    return createValueAggregatorJob(args, ValueAggregator.class);
+  }
+
   public static JobConf createValueAggregatorJob(String args[]
     , Class<? extends ValueAggregatorDescriptor>[] descriptors)
     throws IOException {
@@ -200,6 +217,14 @@ public static void setAggregatorDescriptors(JobConf job
     }
   }
 
+  public static JobConf createValueAggregatorJob(String args[],
+      Class<? extends ValueAggregatorDescriptor>[] descriptors,
+      Class<?> caller) throws IOException {
+    JobConf job = createValueAggregatorJob(args, caller);
+    setAggregatorDescriptors(job, descriptors);
+    return job;
+  }
+
   /**
    * create and run an Aggregate based map/reduce job.
    *
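The three-argument overload combines the descriptor plug-ins with the caller-based jar resolution. A self-contained, hypothetical sketch (MyDescriptorDriver and MyDescriptor are illustrative names; any ValueAggregatorDescriptor implementation would work):

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

public class MyDescriptorDriver {

  // Illustrative plug-in; real examples override generateKeyValPairs() to emit
  // the key/value pairs that the aggregators should combine.
  public static class MyDescriptor extends ValueAggregatorBaseDescriptor {
  }

  @SuppressWarnings({"unchecked", "rawtypes"})
  public static void main(String[] args) throws IOException {
    Class<? extends ValueAggregatorDescriptor>[] descriptors =
        new Class[] { MyDescriptor.class };
    // New overload: descriptor classes plus the caller used for setJarByClass().
    JobConf job = ValueAggregatorJob.createValueAggregatorJob(
        args, descriptors, MyDescriptorDriver.class);
    JobClient.runJob(job);
  }
}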