YARN-1203. Changed YARN web-app proxy to handle http and https URLs from AM registration and finish correctly. Contributed by Omkar Vinit Joshi.
MAPREDUCE-5515. Fixed the MR AM's webapp to depend on a new config, mapreduce.ssl.enabled, which enables https and is off by default, since the MR AM must set up its own certificates rather than depend on the cluster's. Contributed by Omkar Vinit Joshi. git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1524864 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent af78fd729c
commit 13420d01f2
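Taken together, the two changes split scheme selection in two: the MR AM's own webapp obeys the new mapreduce.ssl.enabled key (off by default), while links the AM renders to YARN daemons follow the cluster-wide hadoop.ssl.enabled key. A minimal standalone sketch of that split, not taken from the patch itself; the key names are the ones the patch introduces and uses:

    import org.apache.hadoop.conf.Configuration;

    public class SchemeSelectionSketch {
      // Scheme for the AM's own webapp: governed by mapreduce.ssl.enabled.
      static String amScheme(Configuration conf) {
        return conf.getBoolean("mapreduce.ssl.enabled", false)
            ? "https://" : "http://";
      }

      // Scheme for links to YARN daemons (RM/NM/history): governed by the
      // cluster-wide hadoop.ssl.enabled.
      static String yarnScheme(Configuration conf) {
        return conf.getBoolean("hadoop.ssl.enabled", false)
            ? "https://" : "http://";
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        System.out.println(amScheme(conf));   // "http://" by default
        System.out.println(yarnScheme(conf)); // follows the cluster setting
      }
    }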
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.http;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -38,8 +37,7 @@ public class HttpConfig {
         CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
   }
 
-  @VisibleForTesting
-  static void setSecure(boolean secure) {
+  public static void setSecure(boolean secure) {
     sslEnabled = secure;
   }
 
@@ -196,6 +196,11 @@ Release 2.2.0 - UNRELEASED
     MAPREDUCE-5488. Changed MR client to keep trying to reach the application
     when it sees that an attempt's AM is down. (Jian He via vinodkv)
 
+    MAPREDUCE-5515. Fixed the MR AM's webapp to depend on a new config,
+    mapreduce.ssl.enabled, which enables https and is off by default, since
+    the MR AM must set up its own certificates rather than depend on the
+    cluster's. (Omkar Vinit Joshi via vinodkv)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES
@@ -36,14 +36,17 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.FileOutputCommitter;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.LocalContainerLauncher;
 import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
 import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -101,6 +104,7 @@
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -1313,6 +1317,7 @@ public static void main(String[] args) {
             containerId.getApplicationAttemptId();
       long appSubmitTime = Long.parseLong(appSubmitTimeStr);
 
+
       MRAppMaster appMaster =
           new MRAppMaster(applicationAttemptId, containerId, nodeHostString,
               Integer.parseInt(nodePortString),
@@ -1322,6 +1327,16 @@ public static void main(String[] args) {
           new MRAppMasterShutdownHook(appMaster), SHUTDOWN_HOOK_PRIORITY);
       JobConf conf = new JobConf(new YarnConfiguration());
       conf.addResource(new Path(MRJobConfig.JOB_CONF_FILE));
+
+      // Explicitly disable SSL for the MR AM: we can't let MR users gain
+      // access to the keystore file needed to open an SSL listener. We can
+      // trust the RM/NM to be issued SSL certificates, but definitely not
+      // the MR AM, which runs in user-land.
+      HttpConfig.setSecure(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
+          MRConfig.SSL_ENABLED_KEY_DEFAULT));
+      WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
+          CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+          CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
 
       // log the system properties
       String systemPropsToLog = MRApps.getSystemPropertiesToLog(conf);
@@ -27,8 +27,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
@@ -78,6 +80,7 @@
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
 import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -28,7 +28,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -36,12 +38,10 @@
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
@@ -58,8 +58,6 @@
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
-import com.sun.research.ws.wadl.Response;
-
 /**
  * Registers/unregisters to RM and sends heartbeats to RM.
  */
@@ -148,7 +146,13 @@ protected void register() {
       if (serviceAddr != null) {
         request.setHost(serviceAddr.getHostName());
         request.setRpcPort(serviceAddr.getPort());
-        request.setTrackingUrl(serviceAddr.getHostName() + ":" + clientService.getHttpPort());
+        String scheme = "http://";
+        if (getConfig().getBoolean(MRConfig.SSL_ENABLED_KEY,
+            MRConfig.SSL_ENABLED_KEY_DEFAULT)) {
+          scheme = "https://";
+        }
+        request.setTrackingUrl(scheme + serviceAddr.getHostName() + ":"
+            + clientService.getHttpPort());
       }
       RegisterApplicationMasterResponse response =
           scheduler.registerApplicationMaster(request);
@@ -190,10 +194,11 @@ protected void unregister() {
       }
       LOG.info("Setting job diagnostics to " + sb.toString());
 
-      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(),
-          context.getApplicationID());
+      String historyUrl =
+          WebAppUtil.getSchemePrefix()
+              + JobHistoryUtils.getHistoryUrl(getConfig(),
+                  context.getApplicationID());
       LOG.info("History url is " + historyUrl);
-
       FinishApplicationMasterRequest request =
           FinishApplicationMasterRequest.newInstance(finishState,
               sb.toString(), historyUrl);
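The register() change above means the AM now reports a scheme-qualified tracking URL to the RM. A standalone sketch of the resulting value; the host and port are made-up placeholders, not values from the patch:

    public class TrackingUrlSketch {
      public static void main(String[] args) {
        String host = "am-host.example.com"; // hypothetical AM host
        int httpPort = 34567;                // hypothetical webapp port
        boolean sslEnabled = false;          // value of mapreduce.ssl.enabled

        // Mirrors the scheme choice made in register() above.
        String scheme = sslEnabled ? "https://" : "http://";
        String trackingUrl = scheme + host + ":" + httpPort;
        System.out.println(trackingUrl); // http://am-host.example.com:34567
      }
    }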
@@ -43,6 +43,7 @@
 import org.apache.hadoop.yarn.webapp.Controller;
 import org.apache.hadoop.yarn.webapp.View;
 
+import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 
 /**
@@ -50,6 +51,7 @@
  */
 public class AppController extends Controller implements AMParams {
   private static final Log LOG = LogFactory.getLog(AppController.class);
+  private static final Joiner JOINER = Joiner.on("");
 
   protected final App app;
 
@@ -58,7 +60,9 @@ protected AppController(App app, Configuration conf, RequestContext ctx,
     super(ctx);
     this.app = app;
     set(APP_ID, app.context.getApplicationID().toString());
-    set(RM_WEB, YarnConfiguration.getRMWebAppURL(conf));
+    set(RM_WEB,
+        JOINER.join(WebAppUtil.getSchemePrefix(),
+            YarnConfiguration.getRMWebAppHostAndPort(conf)));
   }
 
   @Inject
@@ -104,7 +104,7 @@ public class JobBlock extends HtmlBlock {
         table.tr().
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(HttpConfig.getSchemePrefix(),
+          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
             attempt.getNodeHttpAddress()),
             attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getLogsLink()),
@@ -63,7 +63,7 @@ public class NavBlock extends HtmlBlock {
             li().a(url("conf", jobid), "Configuration")._().
             li().a(url("tasks", jobid, "m"), "Map tasks")._().
             li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
-          li().a(".logslink", url(HttpConfig.getSchemePrefix(),
+          li().a(".logslink", url(WebAppUtil.getSchemePrefix(),
               nodeHttpAddress, "node",
               "containerlogs", thisAmInfo.getContainerId().toString(),
               app.getJob().getUserName()),
@@ -86,12 +86,12 @@ protected void render(Block html) {
       .append(ta.getState().toString()).append("\",\"")
 
       .append(nodeHttpAddr == null ? "N/A" :
-        "<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>"
+        "<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>"
         + nodeHttpAddr + "</a>")
       .append("\",\"")
 
       .append(ta.getAssignedContainerId() == null ? "N/A" :
-        "<a class='logslink' href='" + url(HttpConfig.getSchemePrefix(), nodeHttpAddr, "node"
+        "<a class='logslink' href='" + url(WebAppUtil.getSchemePrefix(), nodeHttpAddr, "node"
         , "containerlogs", ta.getAssignedContainerIdStr(), app.getJob()
         .getUserName()) + "'>logs</a>")
       .append("\",\"")
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+
+public class WebAppUtil {
+  private static boolean isSSLEnabledInYARN;
+
+  public static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
+    WebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
+  }
+
+  public static boolean isSSLEnabledInYARN() {
+    return isSSLEnabledInYARN;
+  }
+
+  public static String getSchemePrefix() {
+    if (isSSLEnabledInYARN) {
+      return "https://";
+    } else {
+      return "http://";
+    }
+  }
+}
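WebAppUtil above is a process-wide holder: set once at daemon startup, read wherever links are rendered. A short usage sketch under the assumption that the class is on the classpath as added here; the RM address is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;

    public class WebAppUtilSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Seed the holder from the cluster-wide setting, as MRAppMaster.main()
        // and JobHistoryServer.serviceInit() do in this commit.
        WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
            CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
            CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));

        // Link-rendering code then asks for the prefix.
        String link = WebAppUtil.getSchemePrefix() + "rm-host:8088/cluster";
        System.out.println(link); // http://rm-host:8088/cluster unless SSL is on
      }
    }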
@@ -26,6 +26,7 @@
 
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 
@@ -63,7 +64,7 @@ public AMAttemptInfo(AMInfo amInfo, String jobId, String user) {
     ContainerId containerId = amInfo.getContainerId();
     if (containerId != null) {
       this.containerId = containerId.toString();
-      this.logsLink = join(HttpConfig.getSchemePrefix() + nodeHttpAddress,
+      this.logsLink = join(WebAppUtil.getSchemePrefix() + nodeHttpAddress,
           ujoin("node", "containerlogs", this.containerId, user));
     }
   }
@@ -84,6 +84,11 @@ public interface MRConfig {
       "mapreduce.shuffle.ssl.enabled";
 
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
 
+  public static final String SSL_ENABLED_KEY =
+      "mapreduce.ssl.enabled";
+
+  public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
+
   public static final String SHUFFLE_CONSUMER_PLUGIN =
       "mapreduce.job.reduce.shuffle.consumer.plugin.class";
@@ -289,6 +289,20 @@
     </description>
   </property>
 
+  <property>
+    <name>mapreduce.ssl.enabled</name>
+    <value>false</value>
+    <description>
+      If enabled, the MapReduce application master's http server will be
+      started with SSL enabled. By default the MapReduce AM does not support
+      SSL; if jobs want SSL support, it is the user's responsibility to create
+      and manage certificates, keystores and trust-stores with appropriate
+      permissions. This only applies to the MapReduce application master and
+      is not used by the job history server. Encrypted shuffle does not
+      require this property; refer to mapreduce.shuffle.ssl.enabled instead.
+    </description>
+  </property>
+
   <property>
     <name>mapreduce.shuffle.ssl.file.buffer.size</name>
    <value>65536</value>
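Turning the new flag on for a submission is an ordinary configuration change; a hedged sketch, assuming the user has already provisioned keystores the AM can read:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class EnableAmSslSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Enables https on the MR AM webapp only. Encrypted shuffle is
        // governed separately by mapreduce.shuffle.ssl.enabled, as the
        // description above notes.
        conf.setBoolean("mapreduce.ssl.enabled", true);
        Job job = Job.getInstance(conf, "ssl-am-example");
        // ... configure input/output and submit as usual.
      }
    }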
@@ -24,8 +24,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.hs.server.HSAdminServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -73,6 +75,10 @@ protected void serviceInit(Configuration conf) throws Exception {
 
     config.setBoolean(Dispatcher.DISPATCHER_EXIT_ON_ERROR_KEY, true);
 
+    // This is required for WebApps to use https if enabled.
+    WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {
@@ -27,6 +27,7 @@
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -132,7 +133,7 @@ public class HsJobBlock extends HtmlBlock {
         table.tr((odd = !odd) ? _ODD : _EVEN).
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(HttpConfig.getSchemePrefix(),
+          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
             attempt.getNodeHttpAddress()),
             attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getShortLogsLink()),
@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.util.Times;
@@ -148,7 +149,7 @@ protected void render(Block html) {
       .append(sortId + " ").append(taid).append("\",\"")
       .append(ta.getState().toString()).append("\",\"")
 
-      .append("<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>")
+      .append("<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>")
       .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
 
      .append("<a class='logslink' href='").append(url("logs", nodeIdString
@@ -126,6 +126,9 @@ Release 2.1.1-beta - 2013-09-23
     YARN-1001. Added a web-service to get statistics about per application-type
     per state for consumption by downstream projects. (Zhijie Shen via vinodkv)
 
+    YARN-1203. Changed YARN web-app proxy to handle http and https URLs from
+    AM registration and finish correctly. (Omkar Vinit Joshi via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -91,6 +91,8 @@ public static FinishApplicationMasterRequest newInstance(
 
   /**
    * Get the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * If this URL contains a scheme, the resource manager's web application
+   * proxy will use it; otherwise the proxy defaults to http.
    * @return <em>tracking URL</em> for the <code>ApplicationMaster</code>
    */
   @Public
@@ -99,6 +101,8 @@ public static FinishApplicationMasterRequest newInstance(
 
   /**
    * Set the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * If this URL contains a scheme, the resource manager's web application
+   * proxy will use it; otherwise the proxy defaults to http.
    * @param url <em>tracking URL</em> for the
    *          <code>ApplicationMaster</code>
    */
@@ -103,6 +103,8 @@ public static RegisterApplicationMasterRequest newInstance(String host,
 
   /**
    * Get the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * If this URL contains a scheme, the resource manager's web application
+   * proxy will use it; otherwise the proxy defaults to http.
    * @return <em>tracking URL</em> for the <code>ApplicationMaster</code>
    */
   @Public
@@ -111,6 +113,8 @@ public static RegisterApplicationMasterRequest newInstance(String host,
 
   /**
    * Set the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * If this URL contains a scheme, the resource manager's web application
+   * proxy will use it; otherwise the proxy defaults to http.
    * @param trackingUrl <em>tracking URL</em> for the
    *          <code>ApplicationMaster</code>
    */
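For AM authors, the documented behavior means a scheme may simply be included in the tracking URL at registration. A sketch using the public newInstance factory; the host, ports and URL are placeholders, and the exact factory signature is assumed from this era of the API:

    import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;

    public class RegisterSketch {
      public static void main(String[] args) {
        // With a scheme, the RM web proxy honors it; without one, the proxy
        // defaults to http.
        RegisterApplicationMasterRequest request =
            RegisterApplicationMasterRequest.newInstance(
                "am-host.example.com",                // hypothetical host
                45454,                                // hypothetical RPC port
                "https://am-host.example.com:34567"); // scheme-qualified URL
        System.out.println(request.getTrackingUrl());
      }
    }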
@@ -26,6 +26,8 @@
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
@@ -865,7 +867,8 @@ public static String getRMWebAppHostAndPort(Configuration conf) {
   }
 
   public static String getRMWebAppURL(Configuration conf) {
-    return JOINER.join("http://", getRMWebAppHostAndPort(conf));
+    return JOINER.join(HttpConfig.getSchemePrefix(),
+        getRMWebAppHostAndPort(conf));
   }
 
 }
@@ -135,16 +135,46 @@ public static URI getProxyUri(URI originalUri, URI proxyUri,
     }
   }
 
+  /**
+   * Create a URI from a no-scheme URL, such as is returned by the AM.
+   * @param url the URL returned by an AM. This may or may not contain a
+   *          scheme.
+   * @return a URI with an http scheme
+   * @throws URISyntaxException if the url is not formatted correctly.
+   */
+  public static URI getUriFromAMUrl(String url)
+      throws URISyntaxException {
+    if (getSchemeFromUrl(url).isEmpty()) {
+      /*
+       * If the AM reports a URL with a scheme, that scheme is used;
+       * otherwise the scheme defaults to the one configured via
+       * "hadoop.ssl.enabled".
+       */
+      return new URI(HttpConfig.getSchemePrefix() + url);
+    } else {
+      return new URI(url);
+    }
+  }
+
   /**
    * Create a URI from a no-scheme URL, such as is returned by the AM.
    * @param noSchemeUrl the URL format returned by an AM
    * @return a URI with an http scheme
    * @throws URISyntaxException if the url is not formatted correctly.
    */
-  public static URI getUriFromAMUrl(String noSchemeUrl)
-      throws URISyntaxException {
-    return new URI(HttpConfig.getSchemePrefix() + noSchemeUrl);
-  }
+  public static URI getUriFromAMUrl(String scheme, String noSchemeUrl)
+      throws URISyntaxException {
+    if (getSchemeFromUrl(noSchemeUrl).isEmpty()) {
+      /*
+       * If the AM reports a URL with a scheme, that scheme is used;
+       * otherwise the scheme supplied by the caller is prepended.
+       */
+      return new URI(scheme + "://" + noSchemeUrl);
+    } else {
+      return new URI(noSchemeUrl);
+    }
+  }
 
   /**
    * Returns the first valid tracking link, if any, from the given id from the
@@ -169,4 +199,20 @@ public static URI getUriFromTrackingPlugins(ApplicationId id,
     }
     return null;
   }
+
+  /**
+   * Returns the scheme if present in the url,
+   * e.g. "https://issues.apache.org/jira/browse/YARN" -> "https".
+   */
+  public static String getSchemeFromUrl(String url) {
+    int index = 0;
+    if (url != null) {
+      index = url.indexOf("://");
+    }
+    if (index > 0) {
+      return url.substring(0, index);
+    } else {
+      return "";
+    }
+  }
 }
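The behavior of the two helpers above, sketched as a small driver; the URLs are illustrative:

    import java.net.URI;
    import java.net.URISyntaxException;
    import org.apache.hadoop.yarn.server.webproxy.ProxyUriUtils;

    public class ProxyUriUtilsSketch {
      public static void main(String[] args) throws URISyntaxException {
        // getSchemeFromUrl: the scheme when present, "" otherwise.
        System.out.println(ProxyUriUtils.getSchemeFromUrl(
            "https://host:8090/app"));                   // https
        System.out.println(ProxyUriUtils.getSchemeFromUrl(
            "host:8090/app").isEmpty());                 // true

        // getUriFromAMUrl(scheme, url): scheme-qualified URLs pass through;
        // bare ones get the caller-supplied scheme.
        URI kept  = ProxyUriUtils.getUriFromAMUrl("http", "https://host:8090/app");
        URI fixed = ProxyUriUtils.getUriFromAMUrl("http", "host:8090/app");
        System.out.println(kept);  // https://host:8090/app
        System.out.println(fixed); // http://host:8090/app
      }
    }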
@@ -163,7 +163,6 @@ private static void proxyLink(HttpServletRequest req,
     }
     config.setLocalAddress(localAddress);
     HttpMethod method = new GetMethod(uri.getEscapedURI());
-
     @SuppressWarnings("unchecked")
     Enumeration<String> names = req.getHeaderNames();
     while(names.hasMoreElements()) {
@@ -293,14 +292,17 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
       }
       String original = applicationReport.getOriginalTrackingUrl();
       URI trackingUri = null;
-      if (original != null) {
-        trackingUri = ProxyUriUtils.getUriFromAMUrl(original);
-      }
       // fallback to ResourceManager's app page if no tracking URI provided
       if(original == null || original.equals("N/A")) {
         resp.sendRedirect(resp.encodeRedirectURL(
             StringHelper.pjoin(rmAppPageUrlBase, id.toString())));
         return;
+      } else {
+        if (ProxyUriUtils.getSchemeFromUrl(original).isEmpty()) {
+          trackingUri = ProxyUriUtils.getUriFromAMUrl("http", original);
+        } else {
+          trackingUri = new URI(original);
+        }
       }
 
       String runningUser = applicationReport.getUser();
@@ -311,8 +313,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
             req.getQueryString(), true), runningUser, id);
         return;
       }
-
-      URI toFetch = new URI(req.getScheme(),
+      URI toFetch = new URI(trackingUri.getScheme(),
           trackingUri.getAuthority(),
           StringHelper.ujoin(trackingUri.getPath(), rest), req.getQueryString(),
          null);
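The proxy's new decision tree in doGet(), restated as a self-contained sketch; the report values and RM page URL are hypothetical:

    import java.net.URI;
    import java.net.URISyntaxException;

    public class ProxyFallbackSketch {
      // "original" is whatever tracking URL the AM registered: possibly null,
      // "N/A", scheme-less, or fully qualified.
      static String resolve(String original, String rmAppPage)
          throws URISyntaxException {
        if (original == null || original.equals("N/A")) {
          return rmAppPage; // fall back to the RM's app page
        }
        boolean hasScheme = original.indexOf("://") > 0;
        URI trackingUri = hasScheme
            ? new URI(original)
            : new URI("http://" + original); // proxy defaults to http
        return trackingUri.toString();
      }

      public static void main(String[] args) throws URISyntaxException {
        String rm = "http://rm-host:8088/cluster/app/application_1_0001";
        System.out.println(resolve(null, rm));                    // RM app page
        System.out.println(resolve("am-host:34567", rm));         // http://am-host:34567
        System.out.println(resolve("https://am-host:34567", rm)); // unchanged
      }
    }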