diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index f24ec88669..e24658e4d4 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -253,6 +253,8 @@ Release 2.0.0 - UNRELEASED
MAPREDUCE-4008. ResourceManager throws MetricsException on start up
saying QueueMetrics MBean already exists (Devaraj K via tgraves)
+ MAPREDUCE-3867. MiniMRYarn/MiniYarn uses fixed ports (tucu)
+
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
index 5c7b55270e..a401763114 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
@@ -78,6 +78,7 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.DelegationToken;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -147,6 +148,18 @@ public void start() {
+ ":" + server.getPort());
LOG.info("Instantiated MRClientService at " + this.bindAddress);
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress = bindAddress.getHostName() + ":" + bindAddress.getPort();
+ conf.set(JHAdminConfig.MR_HISTORY_ADDRESS, resolvedAddress);
+
+ String hostname = getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
+ hostname = (hostname.contains(":")) ? hostname.substring(0, hostname.indexOf(":")) : hostname;
+ int port = webApp.port();
+ resolvedAddress = hostname + ":" + port;
+ conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, resolvedAddress);
+ }
+
super.start();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
index 48e76f42ed..9d7626defb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
@@ -83,7 +83,9 @@ public void testSocketFactory() throws IOException {
JobConf jobConf = new JobConf();
FileSystem.setDefaultUri(jobConf, fs.getUri().toString());
miniMRYarnCluster = initAndStartMiniMRYarnCluster(jobConf);
- JobConf jconf = new JobConf(cconf);
+ JobConf jconf = new JobConf(miniMRYarnCluster.getConfig());
+ jconf.set("hadoop.rpc.socket.factory.class.default",
+ "org.apache.hadoop.ipc.DummySocketFactory");
jconf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
String rmAddress = jconf.get("yarn.resourcemanager.address");
String[] split = rmAddress.split(":");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java
index 6a81a41552..d5dcfac99b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java
@@ -173,7 +173,7 @@ public void testClassPath() throws IOException {
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().toString();
mr = new MiniMRCluster(taskTrackers, namenode, 3);
- JobConf jobConf = new JobConf();
+ JobConf jobConf = mr.createJobConf();
String result;
result = launchWordCount(fileSys.getUri(), jobConf,
"The quick brown fox\nhas many silly\n" + "red fox sox\n", 3, 1);
@@ -205,7 +205,7 @@ public void testExternalWritable()
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().toString();
mr = new MiniMRCluster(taskTrackers, namenode, 3);
- JobConf jobConf = new JobConf();
+ JobConf jobConf = mr.createJobConf();
String result;
result = launchExternal(fileSys.getUri(), jobConf,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
index bb56bd8e8c..7e4a428beb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
@@ -20,6 +20,8 @@
import java.io.File;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -126,6 +128,10 @@ public JobHistoryServerWrapper() {
@Override
public synchronized void start() {
try {
+ getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
historyServer = new JobHistoryServer();
historyServer.init(getConfig());
new Thread() {
@@ -145,6 +151,20 @@ public void run() {
} catch (Throwable t) {
throw new YarnException(t);
}
+ //need to do this because historyServer.init creates a new Configuration
+ getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
+ historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
+ getConfig().set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS));
+
+ LOG.info("MiniMRYARN ResourceManager address: " +
+ getConfig().get(YarnConfiguration.RM_ADDRESS));
+ LOG.info("MiniMRYARN ResourceManager web address: " +
+ getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS));
+ LOG.info("MiniMRYARN HistoryServer address: " +
+ getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
+ LOG.info("MiniMRYARN HistoryServer web address: " +
+ getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS));
}
@Override
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
index 8d67929b54..41e8e6defc 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
@@ -197,12 +197,18 @@ public static void main(String[] args) {
/**
*/
- public Client() throws Exception {
+ public Client(Configuration conf) throws Exception {
// Set up the configuration and RPC
- conf = new Configuration();
+ this.conf = conf;
rpc = YarnRPC.create(conf);
}
+ /**
+ */
+ public Client() throws Exception {
+ this(new Configuration());
+ }
+
/**
* Helper function to print out usage
* @param opts Parsed command line options
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
index a81dd77e71..9aefb0fdfc 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
@@ -18,11 +18,17 @@
package org.apache.hadoop.yarn.applications.distributedshell;
+import java.io.File;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URL;
+import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.JarFinder;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.junit.AfterClass;
@@ -48,6 +54,14 @@ public static void setup() throws InterruptedException, IOException {
1, 1, 1);
yarnCluster.init(conf);
yarnCluster.start();
+ URL url = Thread.currentThread().getContextClassLoader().getResource("yarn-site.xml");
+ if (url == null) {
+ throw new RuntimeException("Could not find 'yarn-site.xml' dummy file in classpath");
+ }
+ yarnCluster.getConfig().set("yarn.application.classpath", new File(url.getPath()).getParent());
+ OutputStream os = new FileOutputStream(new File(url.getPath()));
+ yarnCluster.getConfig().writeXml(os);
+ os.close();
}
try {
Thread.sleep(2000);
@@ -81,14 +95,14 @@ public void testDSShell() throws Exception {
};
LOG.info("Initializing DS Client");
- Client client = new Client();
+ Client client = new Client(new Configuration(yarnCluster.getConfig()));
boolean initSuccess = client.init(args);
- assert(initSuccess);
+ Assert.assertTrue(initSuccess);
LOG.info("Running DS Client");
boolean result = client.run();
LOG.info("Client run completed. Result=" + result);
- assert (result == true);
+ Assert.assertTrue(result);
}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/resources/yarn-site.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/resources/yarn-site.xml
new file mode 100644
index 0000000000..9660ace468
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/resources/yarn-site.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<!--
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. See accompanying LICENSE file.
+-->
+<configuration>
+  <!-- Dummy (empty) configuration resource; overwritten at test setup time
+       by MiniYARNCluster with the cluster's actual resolved addresses. -->
+</configuration>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 407c3a690d..c80b62022b 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -538,6 +538,8 @@ public class YarnConfiguration extends Configuration {
/** Container temp directory */
public static final String DEFAULT_CONTAINER_TEMP_DIR = "./tmp";
+  public static final String IS_MINI_YARN_CLUSTER = YARN_PREFIX + "is.minicluster";
+
public YarnConfiguration() {
super();
}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
index 583304481b..94e6d9c636 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
@@ -120,6 +120,11 @@ public void start() {
}
this.server.start();
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ this.server.getListenerAddress().getHostName() + ":" + this.server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_ADMIN_ADDRESS, resolvedAddress);
+ }
super.start();
}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
index d9b4f8ae48..952788e019 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
@@ -119,11 +119,14 @@ public void start() {
}
this.server.start();
-
this.bindAddress =
NetUtils.createSocketAddr(masterServiceAddress.getHostName(),
this.server.getPort());
-
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ this.server.getListenerAddress().getHostName() + ":" + this.server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, resolvedAddress);
+ }
super.start();
}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index fe3f6a25c4..12fa689925 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -150,6 +150,11 @@ public void start() {
}
this.server.start();
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ this.server.getListenerAddress().getHostName() + ":" + this.server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_ADDRESS, resolvedAddress);
+ }
super.start();
}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
index 2336ba8285..b267226533 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
@@ -20,6 +20,7 @@
import java.io.IOException;
+import java.net.InetAddress;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@@ -475,6 +476,15 @@ public void start() {
} catch(IOException ie) {
throw new YarnException("Failed to start secret manager threads", ie);
}
+
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String hostname = getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS);
+ hostname = (hostname.contains(":")) ? hostname.substring(0, hostname.indexOf(":")) : hostname;
+ int port = webApp.port();
+ String resolvedAddress = hostname + ":" + port;
+ conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS, resolvedAddress);
+ }
super.start();
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
index 6ffc9b2315..7881dbb2c7 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
@@ -133,6 +133,11 @@ public synchronized void start() {
}
this.server.start();
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ server.getListenerAddress().getHostName() + ":" + server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, resolvedAddress);
+ }
}
@Override
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index 37cbcd6369..a011ed817f 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -20,6 +20,9 @@
import java.io.File;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -113,7 +116,16 @@ public ResourceManager getResourceManager() {
public NodeManager getNodeManager(int i) {
return this.nodeManagers[i];
}
-
+
+ public static String getHostname() {
+ try {
+ return InetAddress.getLocalHost().getHostName();
+ }
+ catch (UnknownHostException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
private class ResourceManagerWrapper extends AbstractService {
public ResourceManagerWrapper() {
super(ResourceManagerWrapper.class.getName());
@@ -122,6 +134,19 @@ public ResourceManagerWrapper() {
@Override
public synchronized void start() {
try {
+ getConfig().setBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, true);
+ getConfig().set(YarnConfiguration.RM_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_ADMIN_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_SCHEDULER_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_WEBAPP_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
Store store = StoreFactory.getStore(getConfig());
resourceManager = new ResourceManager(store) {
@Override
@@ -151,6 +176,10 @@ public void run() {
} catch (Throwable t) {
throw new YarnException(t);
}
+ LOG.info("MiniYARN ResourceManager address: " +
+ getConfig().get(YarnConfiguration.RM_ADDRESS));
+ LOG.info("MiniYARN ResourceManager web address: " +
+ getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS));
}
@Override
@@ -212,9 +241,12 @@ public synchronized void start() {
remoteLogDir.getAbsolutePath());
// By default AM + 2 containers
getConfig().setInt(YarnConfiguration.NM_PMEM_MB, 4*1024);
- getConfig().set(YarnConfiguration.NM_ADDRESS, "0.0.0.0:0");
- getConfig().set(YarnConfiguration.NM_LOCALIZER_ADDRESS, "0.0.0.0:0");
- getConfig().set(YarnConfiguration.NM_WEBAPP_ADDRESS, "0.0.0.0:0");
+ getConfig().set(YarnConfiguration.NM_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.NM_LOCALIZER_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.NM_WEBAPP_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
LOG.info("Starting NM: " + index);
nodeManagers[index].init(getConfig());
new Thread() {
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
index 3b4a9f3f6a..ac577e4c7e 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
@@ -20,12 +20,13 @@
import java.io.DataOutputStream;
import java.io.IOException;
+import java.util.Map;
+
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.After;
import org.junit.Before;
@@ -38,8 +39,6 @@ public class TestFileArgs extends TestStreaming
private MiniDFSCluster dfs = null;
private MiniMRCluster mr = null;
private FileSystem fileSys = null;
- private String strJobTracker = null;
- private String strNamenode = null;
private String namenode = null;
private Configuration conf = null;
@@ -56,8 +55,6 @@ public TestFileArgs() throws IOException
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().getAuthority();
mr = new MiniMRCluster(1, namenode, 1);
- strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
- strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
map = LS_PATH;
FileSystem.setDefaultUri(conf, "hdfs://" + namenode);
@@ -100,18 +97,16 @@ protected Configuration getConf() {
@Override
protected String[] genArgs() {
+ for (Map.Entry entry : mr.createJobConf()) {
+ args.add("-jobconf");
+ args.add(entry.getKey() + "=" + entry.getValue());
+ }
args.add("-file");
args.add(new java.io.File("target/sidefile").getAbsolutePath());
args.add("-numReduceTasks");
args.add("0");
args.add("-jobconf");
- args.add(strNamenode);
- args.add("-jobconf");
- args.add(strJobTracker);
- args.add("-jobconf");
args.add("mapred.jar=" + STREAMING_JAR);
- args.add("-jobconf");
- args.add("mapreduce.framework.name=yarn");
args.add("-verbose");
return super.genArgs();
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
index 105bfb1397..c5136e6dc7 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
@@ -19,14 +19,10 @@
package org.apache.hadoop.streaming;
import java.io.File;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.DataOutputStream;
-import java.io.InputStreamReader;
-import java.io.BufferedReader;
-import java.util.Arrays;
+import java.util.Map;
import java.util.zip.ZipEntry;
-import java.util.jar.JarOutputStream;
import java.util.zip.ZipOutputStream;
import org.apache.commons.logging.Log;
@@ -37,12 +33,7 @@
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.util.StringUtils;
-
-import org.junit.Test;
-import static org.junit.Assert.*;
/**
* This class tests cacheArchive option of streaming
@@ -66,8 +57,6 @@ public class TestMultipleArchiveFiles extends TestStreaming
private MiniDFSCluster dfs = null;
private MiniMRCluster mr = null;
private FileSystem fileSys = null;
- private String strJobTracker = null;
- private String strNamenode = null;
private String namenode = null;
public TestMultipleArchiveFiles() throws Exception {
@@ -80,8 +69,6 @@ public TestMultipleArchiveFiles() throws Exception {
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().getAuthority();
mr = new MiniMRCluster(1, namenode, 1);
- strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
- strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
map = "xargs cat";
reduce = "cat";
@@ -123,6 +110,10 @@ protected String[] genArgs() {
String cache1 = workDir + CACHE_ARCHIVE_1 + "#symlink1";
String cache2 = workDir + CACHE_ARCHIVE_2 + "#symlink2";
+ for (Map.Entry entry : mr.createJobConf()) {
+ args.add("-jobconf");
+ args.add(entry.getKey() + "=" + entry.getValue());
+ }
args.add("-jobconf");
args.add("mapreduce.job.reduces=1");
args.add("-cacheArchive");
@@ -130,13 +121,7 @@ protected String[] genArgs() {
args.add("-cacheArchive");
args.add(cache2);
args.add("-jobconf");
- args.add(strNamenode);
- args.add("-jobconf");
- args.add(strJobTracker);
- args.add("-jobconf");
args.add("mapred.jar=" + STREAMING_JAR);
- args.add("-jobconf");
- args.add("mapreduce.framework.name=yarn");
return super.genArgs();
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java
index 94e0c4222a..357bfcfd0b 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java
@@ -22,8 +22,9 @@
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
@@ -36,7 +37,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.Utils;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+
/**
* This test case tests the symlink creation
* utility provided by distributed caching
@@ -73,15 +74,18 @@ public void testMultipleCachefiles() throws Exception
String namenode = fileSys.getUri().toString();
mr = new MiniMRCluster(1, namenode, 3);
- String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
- String strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
+
+ List args = new ArrayList();
+ for (Map.Entry entry : mr.createJobConf()) {
+ args.add("-jobconf");
+ args.add(entry.getKey() + "=" + entry.getValue());
+ }
+
String argv[] = new String[] {
"-input", INPUT_FILE,
"-output", OUTPUT_DIR,
"-mapper", map,
"-reducer", reduce,
- "-jobconf", strNamenode,
- "-jobconf", strJobtracker,
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
"-jobconf",
JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
@@ -98,9 +102,13 @@ public void testMultipleCachefiles() throws Exception
"-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
"-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2,
"-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
- "-jobconf", "mapreduce.framework.name=yarn"
};
+ for (String arg : argv) {
+ args.add(arg);
+ }
+ argv = args.toArray(new String[args.size()]);
+
fileSys.delete(new Path(OUTPUT_DIR), true);
DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
index e9a0286cf1..ba8cf090d3 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
@@ -19,6 +19,9 @@
package org.apache.hadoop.streaming;
import java.io.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -47,20 +50,30 @@ public class TestStreamingTaskLog {
final long USERLOG_LIMIT_KB = 5;//consider 5kb as logSize
String[] genArgs() {
- return new String[] {
+
+ List args = new ArrayList();
+ for (Map.Entry entry : mr.createJobConf()) {
+ args.add("-jobconf");
+ args.add(entry.getKey() + "=" + entry.getValue());
+ }
+
+ String[] argv = new String[] {
"-input", inputPath.toString(),
"-output", outputPath.toString(),
"-mapper", map,
"-reducer", StreamJob.REDUCE_NONE,
- "-jobconf", "mapred.job.tracker=" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS),
- "-jobconf", "fs.default.name=" + fs.getUri().toString(),
"-jobconf", "mapred.map.tasks=1",
"-jobconf", "keep.failed.task.files=true",
"-jobconf", "mapreduce.task.userlog.limit.kb=" + USERLOG_LIMIT_KB,
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
"-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
- "-jobconf", "mapreduce.framework.name=yarn"
};
+
+ for (String arg : argv) {
+ args.add(arg);
+ }
+ argv = args.toArray(new String[args.size()]);
+ return argv;
}
/**
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
index cc6a8cdd85..2c9547ad82 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
@@ -21,6 +21,9 @@
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
@@ -62,17 +65,20 @@ public void testSymLink() throws Exception
FileSystem fileSys = dfs.getFileSystem();
String namenode = fileSys.getUri().toString();
mr = new MiniMRCluster(1, namenode, 3);
+
+ List args = new ArrayList();
+ for (Map.Entry entry : mr.createJobConf()) {
+ args.add("-jobconf");
+ args.add(entry.getKey() + "=" + entry.getValue());
+ }
+
// During tests, the default Configuration will use a local mapred
// So don't specify -config or -cluster
- String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
- String strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
String argv[] = new String[] {
"-input", INPUT_FILE,
"-output", OUTPUT_DIR,
"-mapper", map,
"-reducer", reduce,
- "-jobconf", strNamenode,
- "-jobconf", strJobtracker,
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
"-jobconf",
JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
@@ -88,9 +94,13 @@ public void testSymLink() throws Exception
conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
"-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink",
"-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
- "-jobconf", "mapreduce.framework.name=yarn"
};
+ for (String arg : argv) {
+ args.add(arg);
+ }
+ argv = args.toArray(new String[args.size()]);
+
fileSys.delete(new Path(OUTPUT_DIR), true);
DataOutputStream file = fileSys.create(new Path(INPUT_FILE));