MAPREDUCE-3064. 27 unit test failures with Invalid mapreduce.jobtracker.address configuration value for JobTracker: local (Venu Gopala Rao via mahadev)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1175449 13f79535-47bb-0310-9956-ffa450edef68
commit 5a3040cad4
parent a5c9ede143
@@ -1409,6 +1409,7 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-2691. Increase threadpool size for launching containers in
     MapReduce ApplicationMaster. (vinodkv via acmurthy)
 
+
     MAPREDUCE-2990. Fixed display of NodeHealthStatus. (Subroto Sanyal via
     acmurthy)
 
@@ -1418,6 +1419,10 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-2952. Fixed ResourceManager/MR-client to consume diagnostics
     for AM failures in a couple of corner cases. (Arun C Murthy via vinodkv)
 
+    MAPREDUCE-3064. 27 unit test failures with Invalid
+    "mapreduce.jobtracker.address" configuration value for
+    JobTracker: "local" (Venu Gopala Rao via mahadev)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -37,7 +37,7 @@ public class JobTrackerClientProtocolProvider extends ClientProtocolProvider {
   @Override
   public ClientProtocol create(Configuration conf) throws IOException {
     String framework = conf.get(MRConfig.FRAMEWORK_NAME);
-    if (framework != null && !framework.equals("classic")) {
+    if (!MRConfig.CLASSIC_FRAMEWORK_NAME.equals(framework)) {
       return null;
     }
     String tracker = conf.get(JTConfig.JT_IPC_ADDRESS, "local");
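Note on the guard change above: with the old condition, an unset mapreduce.framework.name (null) slipped past the check, so the provider went on to read mapreduce.jobtracker.address, picked up the "local" default, and failed with the 'Invalid "mapreduce.jobtracker.address" configuration value for JobTracker: "local"' error that broke the 27 tests. With the new condition the provider only answers when the framework is explicitly "classic". A minimal illustrative sketch (not part of the patch; the class and method names below are made up for the example):

// Illustration only: how the old and new guards in
// JobTrackerClientProtocolProvider.create() treat an unset framework name.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;

public class FrameworkGuardSketch {

  // Old guard: a null framework fell through, so the provider went on to read
  // mapreduce.jobtracker.address, defaulted to "local", and threw the
  // "Invalid ... configuration value for JobTracker: local" error.
  static boolean oldGuardRejects(Configuration conf) {
    String framework = conf.get(MRConfig.FRAMEWORK_NAME);
    return framework != null && !framework.equals("classic");
  }

  // New guard: anything other than an explicit "classic" (including null)
  // makes the provider return null and defer to another ClientProtocolProvider.
  static boolean newGuardRejects(Configuration conf) {
    String framework = conf.get(MRConfig.FRAMEWORK_NAME);
    return !MRConfig.CLASSIC_FRAMEWORK_NAME.equals(framework);
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration(false); // framework name left unset
    System.out.println("old guard rejects: " + oldGuardRejects(conf)); // false
    System.out.println("new guard rejects: " + newGuardRejects(conf)); // true
  }
}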
@@ -382,6 +382,7 @@ static JobConf configureJobConf(JobConf conf, String namenode,
       UserGroupInformation ugi) {
     JobConf result = new JobConf(conf);
     FileSystem.setDefaultUri(result, namenode);
+    result.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
     result.set(JTConfig.JT_IPC_ADDRESS, "localhost:"+jobTrackerPort);
     result.set(JTConfig.JT_HTTP_ADDRESS,
         "127.0.0.1:" + jobTrackerInfoPort);
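The remaining hunks repeat the same test-side pattern: each MiniMR-based test that relies on the classic JobTracker now sets the framework name explicitly instead of leaving it null, alongside the JobTracker address it already configured. A rough sketch of that pattern, assuming a hypothetical helper name and an example address (the real tests derive the port from MiniMRCluster):

// Sketch of the configuration pattern the test changes repeat.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;

public class ClassicTestConfSketch {

  static Configuration classicClientConf(String jobTrackerAddress) {
    Configuration conf = new Configuration();
    // Explicitly pick the classic framework so JobTrackerClientProtocolProvider
    // is the provider that answers, ...
    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
    // ... and point it at a running JobTracker instead of the "local" default.
    conf.set(JTConfig.JT_IPC_ADDRESS, jobTrackerAddress);
    return conf;
  }

  public static void main(String[] args) {
    // "localhost:54311" is only an example value for this sketch.
    Configuration conf = classicClientConf("localhost:54311");
    System.out.println(conf.get(MRConfig.FRAMEWORK_NAME));  // classic
    System.out.println(conf.get(JTConfig.JT_IPC_ADDRESS));  // localhost:54311
  }
}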
@@ -24,6 +24,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
@@ -314,6 +315,7 @@ static Job submitSleepJob(final int numMappers, final int numReducers, final lon
       final long reduceSleepTime, boolean shouldComplete, String userInfo,
       String queueName, Configuration clientConf) throws IOException,
       InterruptedException, ClassNotFoundException {
+    clientConf.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
     clientConf.set(JTConfig.JT_IPC_ADDRESS, "localhost:"
         + miniMRCluster.getJobTrackerPort());
     UserGroupInformation ugi;
@@ -55,6 +55,7 @@ static void configureWordCount(FileSystem fs,
       file.close();
     }
     FileSystem.setDefaultUri(conf, fs.getUri());
+    conf.set(JTConfig.FRAMEWORK_NAME, JTConfig.CLASSIC_FRAMEWORK_NAME);
     conf.set(JTConfig.JT_IPC_ADDRESS, jobTracker);
     conf.setJobName("wordcount");
     conf.setInputFormat(TextInputFormat.class);
@@ -121,6 +122,7 @@ static String launchExternal(URI uri, String jobTracker, JobConf conf,
       file.close();
     }
     FileSystem.setDefaultUri(conf, uri);
+    conf.set(JTConfig.FRAMEWORK_NAME, JTConfig.CLASSIC_FRAMEWORK_NAME);
     conf.set(JTConfig.JT_IPC_ADDRESS, jobTracker);
     conf.setJobName("wordcount");
     conf.setInputFormat(TextInputFormat.class);
@@ -27,13 +27,14 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Progressable;
 
@@ -67,6 +68,7 @@ public static boolean launchJob(URI fileSys,
 
     // use WordCount example
     FileSystem.setDefaultUri(conf, fileSys);
+    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
     conf.set(JTConfig.JT_IPC_ADDRESS, jobTracker);
     conf.setJobName("foo");
 
|
Loading…
Reference in New Issue
Block a user