YARN-1203. Changed YARN web-app proxy to handle http and https URLs from AM registration and finish correctly. Contributed by Omkar Vinit Joshi.

MAPREDUCE-5515. Fixed MR AM's webapp to depend on a new config, mapreduce.ssl.enabled, to enable https; it is disabled
by default because the MR AM needs to set up its own certificates etc. and cannot depend on the cluster's. Contributed by Omkar Vinit Joshi.
svn merge --ignore-ancestry -c 1524864 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1524867 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2013-09-19 23:51:14 +00:00
parent d8ed9ef4b8
commit d71bb8385b
22 changed files with 190 additions and 31 deletions

View File: org/apache/hadoop/http/HttpConfig.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.http;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -38,8 +37,7 @@ public class HttpConfig {
         CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
   }
 
-  @VisibleForTesting
-  static void setSecure(boolean secure) {
+  public static void setSecure(boolean secure) {
     sslEnabled = secure;
   }

View File: hadoop-mapreduce-project/CHANGES.txt

@@ -59,6 +59,11 @@ Release 2.2.0 - UNRELEASED
     MAPREDUCE-5488. Changed MR client to keep trying to reach the application
     when it sees that on attempt's AM is down. (Jian He via vinodkv)
 
+    MAPREDUCE-5515. Fixed MR AM's webapp to depend on a new config
+    mapreduce.ssl.enabled to enable https and disabling it by default as MR AM
+    needs to set up its own certificates etc and not depend on clusters'.
+    (Omkar Vinit Joshi via vinodkv)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES

View File: org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java

@@ -36,14 +36,17 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.FileOutputCommitter;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.LocalContainerLauncher;
 import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
 import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -101,6 +104,7 @@
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -1313,6 +1317,7 @@ public static void main(String[] args) {
           containerId.getApplicationAttemptId();
       long appSubmitTime = Long.parseLong(appSubmitTimeStr);
+
       MRAppMaster appMaster =
           new MRAppMaster(applicationAttemptId, containerId, nodeHostString,
               Integer.parseInt(nodePortString),
@@ -1322,6 +1327,17 @@ public static void main(String[] args) {
           new MRAppMasterShutdownHook(appMaster), SHUTDOWN_HOOK_PRIORITY);
       JobConf conf = new JobConf(new YarnConfiguration());
       conf.addResource(new Path(MRJobConfig.JOB_CONF_FILE));
+
+      // Explicitly disabling SSL for map reduce task as we can't allow MR users
+      // to gain access to keystore file for opening SSL listener. We can trust
+      // RM/NM to issue SSL certificates but definitely not MR-AM as it is
+      // running in user-land.
+      HttpConfig.setSecure(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
+          MRConfig.SSL_ENABLED_KEY_DEFAULT));
+      WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
+          CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+          CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
+
       String jobUserName = System
           .getenv(ApplicationConstants.Environment.USER.name());
       conf.set(MRJobConfig.USER_NAME, jobUserName);

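For context: a minimal, standalone sketch (not part of the patch) of how the two switches set above divide responsibility. The key strings match MRConfig.SSL_ENABLED_KEY and CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY from the hunk:

    // Sketch only: the AM consults two independent flags, as in the hunk above.
    //   mapreduce.ssl.enabled -> scheme of the MR AM's own webapp
    //   hadoop.ssl.enabled    -> scheme used for links to YARN/JHS daemons
    import org.apache.hadoop.conf.Configuration;

    public class SchemeFlagsSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        boolean amSsl = conf.getBoolean("mapreduce.ssl.enabled", false);
        boolean clusterSsl = conf.getBoolean("hadoop.ssl.enabled", false);
        System.out.println("AM webapp scheme:    " + (amSsl ? "https" : "http"));
        System.out.println("Cluster link scheme: " + (clusterSsl ? "https" : "http"));
      }
    }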
View File: org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java

@@ -27,8 +27,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
@@ -78,6 +80,7 @@
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
 import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;

View File: org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java

@@ -28,7 +28,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -36,12 +38,10 @@
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
@@ -58,8 +58,6 @@
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
-import com.sun.research.ws.wadl.Response;
-
 /**
  * Registers/unregisters to RM and sends heartbeats to RM.
  */
@@ -148,7 +146,13 @@ protected void register() {
       if (serviceAddr != null) {
         request.setHost(serviceAddr.getHostName());
         request.setRpcPort(serviceAddr.getPort());
-        request.setTrackingUrl(serviceAddr.getHostName() + ":" + clientService.getHttpPort());
+        String scheme = "http://";
+        if (getConfig().getBoolean(MRConfig.SSL_ENABLED_KEY,
+            MRConfig.SSL_ENABLED_KEY_DEFAULT)) {
+          scheme = "https://";
+        }
+        request.setTrackingUrl(scheme + serviceAddr.getHostName() + ":"
+            + clientService.getHttpPort());
       }
       RegisterApplicationMasterResponse response =
           scheduler.registerApplicationMaster(request);
@@ -190,10 +194,11 @@ protected void unregister() {
       }
       LOG.info("Setting job diagnostics to " + sb.toString());
 
-      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(),
-          context.getApplicationID());
+      String historyUrl =
+          WebAppUtil.getSchemePrefix()
+              + JobHistoryUtils.getHistoryUrl(getConfig(),
+                  context.getApplicationID());
       LOG.info("History url is " + historyUrl);
 
       FinishApplicationMasterRequest request =
           FinishApplicationMasterRequest.newInstance(finishState,
               sb.toString(), historyUrl);

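To make the register() change concrete, a small self-contained sketch (host and port are made up) of the tracking URL the AM now sends:

    // Mirrors the logic added above: prefix host:port with a scheme derived
    // from mapreduce.ssl.enabled instead of registering a scheme-less URL.
    public class TrackingUrlSketch {
      public static void main(String[] args) {
        boolean sslEnabled = true;  // pretend mapreduce.ssl.enabled=true
        String scheme = sslEnabled ? "https://" : "http://";
        String trackingUrl = scheme + "am-host.example.com" + ":" + 8080;
        System.out.println(trackingUrl);  // https://am-host.example.com:8080
      }
    }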
View File: org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java

@@ -43,6 +43,7 @@
 import org.apache.hadoop.yarn.webapp.Controller;
 import org.apache.hadoop.yarn.webapp.View;
 
+import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 
 /**
@@ -50,6 +51,7 @@
  */
 public class AppController extends Controller implements AMParams {
   private static final Log LOG = LogFactory.getLog(AppController.class);
+  private static final Joiner JOINER = Joiner.on("");
 
   protected final App app;
 
@@ -58,7 +60,9 @@ protected AppController(App app, Configuration conf, RequestContext ctx,
     super(ctx);
     this.app = app;
     set(APP_ID, app.context.getApplicationID().toString());
-    set(RM_WEB, YarnConfiguration.getRMWebAppURL(conf));
+    set(RM_WEB,
+        JOINER.join(WebAppUtil.getSchemePrefix(),
+            YarnConfiguration.getRMWebAppHostAndPort(conf)));
   }
 
   @Inject

View File: org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java

@@ -104,7 +104,7 @@ public class JobBlock extends HtmlBlock {
         table.tr().
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(HttpConfig.getSchemePrefix(),
+          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
             attempt.getNodeHttpAddress()),
             attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getLogsLink()),

View File: org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java

@@ -63,7 +63,7 @@ public class NavBlock extends HtmlBlock {
             li().a(url("conf", jobid), "Configuration")._().
             li().a(url("tasks", jobid, "m"), "Map tasks")._().
             li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
-          li().a(".logslink", url(HttpConfig.getSchemePrefix(),
+          li().a(".logslink", url(WebAppUtil.getSchemePrefix(),
               nodeHttpAddress, "node",
               "containerlogs", thisAmInfo.getContainerId().toString(),
               app.getJob().getUserName()),

View File: org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java

@@ -86,12 +86,12 @@ protected void render(Block html) {
       .append(ta.getState().toString()).append("\",\"")
       .append(nodeHttpAddr == null ? "N/A" :
-          "<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>"
+          "<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>"
           + nodeHttpAddr + "</a>")
       .append("\",\"")
       .append(ta.getAssignedContainerId() == null ? "N/A" :
-          "<a class='logslink' href='" + url(HttpConfig.getSchemePrefix(), nodeHttpAddr, "node"
+          "<a class='logslink' href='" + url(WebAppUtil.getSchemePrefix(), nodeHttpAddr, "node"
           , "containerlogs", ta.getAssignedContainerIdStr(), app.getJob()
           .getUserName()) + "'>logs</a>")
       .append("\",\"")

View File: org/apache/hadoop/mapreduce/v2/app/webapp/WebAppUtil.java (new file)

@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+public class WebAppUtil {
+  private static boolean isSSLEnabledInYARN;
+
+  public static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
+    WebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
+  }
+
+  public static boolean isSSLEnabledInYARN() {
+    return isSSLEnabledInYARN;
+  }
+
+  public static String getSchemePrefix() {
+    if (isSSLEnabledInYARN) {
+      return "https://";
+    } else {
+      return "http://";
+    }
+  }
+}

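For illustration, this is how the web blocks changed below consume the new helper; a minimal sketch with a hypothetical node address:

    import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;

    public class WebAppUtilSketch {
      public static void main(String[] args) {
        WebAppUtil.setSSLEnabledInYARN(false);  // normally fed from hadoop.ssl.enabled
        String nodeHttpAddress = "node-1.example.com:8042";  // hypothetical
        // JobBlock/NavBlock/TaskPage build node links exactly this way:
        System.out.println(WebAppUtil.getSchemePrefix() + nodeHttpAddress);
        // prints: http://node-1.example.com:8042
      }
    }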
View File: org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java

@@ -26,6 +26,7 @@
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
@@ -63,7 +64,7 @@ public AMAttemptInfo(AMInfo amInfo, String jobId, String user) {
     ContainerId containerId = amInfo.getContainerId();
     if (containerId != null) {
       this.containerId = containerId.toString();
-      this.logsLink = join(HttpConfig.getSchemePrefix() + nodeHttpAddress,
+      this.logsLink = join(WebAppUtil.getSchemePrefix() + nodeHttpAddress,
           ujoin("node", "containerlogs", this.containerId, user));
     }
   }

View File: org/apache/hadoop/mapreduce/MRConfig.java

@@ -84,6 +84,11 @@ public interface MRConfig {
     "mapreduce.shuffle.ssl.enabled";
 
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
 
+  public static final String SSL_ENABLED_KEY =
+      "mapreduce.ssl.enabled";
+
+  public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
+
   public static final String SHUFFLE_CONSUMER_PLUGIN =
     "mapreduce.job.reduce.shuffle.consumer.plugin.class";

View File: mapred-default.xml

@@ -545,6 +545,20 @@
   </description>
 </property>
 
+<property>
+  <name>mapreduce.ssl.enabled</name>
+  <value>false</value>
+  <description>
+    If enabled, MapReduce application master's http server will be
+    started with SSL enabled. Map reduce AM by default doesn't support SSL.
+    If MapReduce jobs want SSL support, it is the user's responsibility to
+    create and manage certificates, keystores and trust-stores with appropriate
+    permissions. This is only for MapReduce application master and is not used
+    by job history server. To enable encrypted shuffle this property is not
+    required, instead refer to (mapreduce.shuffle.ssl.enabled) property.
+  </description>
+</property>
+
 <property>
   <name>mapreduce.shuffle.ssl.file.buffer.size</name>
   <value>65536</value>

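A hedged example of opting a job in (sketch only; provisioning the keystore and trust-store remains the user's responsibility, per the description above):

    import org.apache.hadoop.mapred.JobConf;

    public class EnableAmSslSketch {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Turn on https for this job's AM webapp; the default is false.
        conf.setBoolean("mapreduce.ssl.enabled", true);
        // Equivalent at submit time via GenericOptionsParser:
        //   hadoop jar myjob.jar MyJob -Dmapreduce.ssl.enabled=true ...
      }
    }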
View File: org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java

@@ -24,8 +24,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.hs.server.HSAdminServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -73,6 +75,10 @@ protected void serviceInit(Configuration conf) throws Exception {
     config.setBoolean(Dispatcher.DISPATCHER_EXIT_ON_ERROR_KEY, true);
 
+    // This is required for WebApps to use https if enabled.
+    WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {

View File: org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java

@@ -27,6 +27,7 @@
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -132,7 +133,7 @@ public class HsJobBlock extends HtmlBlock {
         table.tr((odd = !odd) ? _ODD : _EVEN).
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(HttpConfig.getSchemePrefix(),
+          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
             attempt.getNodeHttpAddress()),
             attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getShortLogsLink()),

View File: org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java

@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.util.Times;
@@ -148,7 +149,7 @@ protected void render(Block html) {
       .append(sortId + " ").append(taid).append("\",\"")
       .append(ta.getState().toString()).append("\",\"")
-      .append("<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>")
+      .append("<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>")
       .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
       .append("<a class='logslink' href='").append(url("logs", nodeIdString

View File: hadoop-yarn-project/CHANGES.txt

@@ -111,6 +111,9 @@ Release 2.1.1-beta - 2013-09-23
     YARN-1001. Added a web-service to get statistics about per application-type
     per state for consumption by downstream projects. (Zhijie Shen via vinodkv)
 
+    YARN-1203. Changed YARN web-app proxy to handle http and https URLs from
+    AM registration and finish correctly. (Omkar Vinit Joshi via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES

View File: org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java

@@ -91,6 +91,8 @@ public static FinishApplicationMasterRequest newInstance(
   /**
    * Get the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * This url if contains scheme then that will be used by resource manager
+   * web application proxy otherwise it will default to http.
    * @return <em>tracking URL</em>for the <code>ApplicationMaster</code>
    */
   @Public
@@ -99,6 +101,8 @@ public static FinishApplicationMasterRequest newInstance(
   /**
    * Set the <em>tracking URL</em>for the <code>ApplicationMaster</code>
+   * This url if contains scheme then that will be used by resource manager
+   * web application proxy otherwise it will default to http.
    * @param url <em>tracking URL</em>for the
    *          <code>ApplicationMaster</code>
    */

View File: org/apache/hadoop/yarn/api/protocolrecords/RegisterApplicationMasterRequest.java

@@ -103,6 +103,8 @@ public static RegisterApplicationMasterRequest newInstance(String host,
   /**
    * Get the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * This url if contains scheme then that will be used by resource manager
+   * web application proxy otherwise it will default to http.
    * @return <em>tracking URL</em> for the <code>ApplicationMaster</code>
    */
   @Public
@@ -111,6 +113,8 @@ public static RegisterApplicationMasterRequest newInstance(String host,
   /**
    * Set the <em>tracking URL</em> for the <code>ApplicationMaster</code>.
+   * This url if contains scheme then that will be used by resource manager
+   * web application proxy otherwise it will default to http.
    * @param trackingUrl <em>tracking URL</em> for the
    *          <code>ApplicationMaster</code>
    */

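Per the new javadoc, an AM that already serves https can hand the proxy a fully schemed URL; a sketch with placeholder host and ports:

    import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;

    public class RegisterWithSchemeSketch {
      public static void main(String[] args) {
        // Because the tracking URL carries a scheme, the RM web proxy honors
        // it instead of defaulting to http (see the javadoc additions above).
        RegisterApplicationMasterRequest req =
            RegisterApplicationMasterRequest.newInstance(
                "am-host.example.com", 12345, "https://am-host.example.com:8080");
        System.out.println(req.getTrackingUrl());
      }
    }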
View File: org/apache/hadoop/yarn/conf/YarnConfiguration.java

@@ -26,6 +26,8 @@
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
@@ -865,7 +867,8 @@ public static String getRMWebAppHostAndPort(Configuration conf) {
   }
 
   public static String getRMWebAppURL(Configuration conf) {
-    return JOINER.join("http://", getRMWebAppHostAndPort(conf));
+    return JOINER.join(HttpConfig.getSchemePrefix(),
+        getRMWebAppHostAndPort(conf));
   }
 }

View File: org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java

@@ -135,16 +135,46 @@ public static URI getProxyUri(URI originalUri, URI proxyUri,
     }
   }
 
+  /**
+   * Create a URI form a no scheme Url, such as is returned by the AM.
+   * @param url the URL format returned by an AM. This may or may not contain
+   * scheme.
+   * @return a URI with an http scheme
+   * @throws URISyntaxException if the url is not formatted correctly.
+   */
+  public static URI getUriFromAMUrl(String url)
+      throws URISyntaxException {
+    if (getSchemeFromUrl(url).isEmpty()) {
+      /*
+       * check is made to make sure if AM reports with scheme then it will be
+       * used by default otherwise it will default to the one configured using
+       * "hadoop.ssl.enabled".
+       */
+      return new URI(HttpConfig.getSchemePrefix() + url);
+    } else {
+      return new URI(url);
+    }
+  }
+
   /**
    * Create a URI form a no scheme Url, such as is returned by the AM.
    * @param noSchemeUrl the URL formate returned by an AM
    * @return a URI with an http scheme
    * @throws URISyntaxException if the url is not formatted correctly.
    */
-  public static URI getUriFromAMUrl(String noSchemeUrl)
+  public static URI getUriFromAMUrl(String scheme, String noSchemeUrl)
       throws URISyntaxException {
-    return new URI(HttpConfig.getSchemePrefix() + noSchemeUrl);
-  }
+    if (getSchemeFromUrl(noSchemeUrl).isEmpty()) {
+      /*
+       * check is made to make sure if AM reports with scheme then it will be
+       * used by default otherwise it will default to the one configured using
+       * "hadoop.ssl.enabled".
+       */
+      return new URI(scheme + "://" + noSchemeUrl);
+    } else {
+      return new URI(noSchemeUrl);
+    }
+  }
 
   /**
    * Returns the first valid tracking link, if any, from the given id from the
@@ -169,4 +199,20 @@ public static URI getUriFromTrackingPlugins(ApplicationId id,
     }
     return null;
   }
+
+  /**
+   * Returns the scheme if present in the url
+   * eg. "https://issues.apache.org/jira/browse/YARN" > "https"
+   */
+  public static String getSchemeFromUrl(String url) {
+    int index = 0;
+    if (url != null) {
+      index = url.indexOf("://");
+    }
+    if (index > 0) {
+      return url.substring(0, index);
+    } else {
+      return "";
+    }
+  }
 }

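A quick behavioral sketch of the getSchemeFromUrl helper added above (inputs are illustrative):

    import org.apache.hadoop.yarn.server.webproxy.ProxyUriUtils;

    public class SchemeFromUrlSketch {
      public static void main(String[] args) {
        // The scheme is whatever precedes "://"; otherwise the empty string.
        System.out.println(ProxyUriUtils.getSchemeFromUrl(
            "https://issues.apache.org/jira/browse/YARN"));    // https
        System.out.println(ProxyUriUtils.getSchemeFromUrl(
            "am-host.example.com:8080").isEmpty());            // true
        System.out.println(ProxyUriUtils.getSchemeFromUrl(null).isEmpty()); // true
      }
    }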
View File: org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java

@@ -163,7 +163,6 @@ private static void proxyLink(HttpServletRequest req,
     }
     config.setLocalAddress(localAddress);
     HttpMethod method = new GetMethod(uri.getEscapedURI());
-
     @SuppressWarnings("unchecked")
     Enumeration<String> names = req.getHeaderNames();
     while(names.hasMoreElements()) {
@@ -293,14 +292,17 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
       }
       String original = applicationReport.getOriginalTrackingUrl();
       URI trackingUri = null;
-      if (original != null) {
-        trackingUri = ProxyUriUtils.getUriFromAMUrl(original);
-      }
       // fallback to ResourceManager's app page if no tracking URI provided
       if(original == null || original.equals("N/A")) {
         resp.sendRedirect(resp.encodeRedirectURL(
             StringHelper.pjoin(rmAppPageUrlBase, id.toString())));
         return;
+      } else {
+        if (ProxyUriUtils.getSchemeFromUrl(original).isEmpty()) {
+          trackingUri = ProxyUriUtils.getUriFromAMUrl("http", original);
+        } else {
+          trackingUri = new URI(original);
+        }
       }
 
       String runningUser = applicationReport.getUser();
@@ -311,8 +313,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
             req.getQueryString(), true), runningUser, id);
         return;
       }
-      URI toFetch = new URI(req.getScheme(),
+      URI toFetch = new URI(trackingUri.getScheme(),
           trackingUri.getAuthority(),
           StringHelper.ujoin(trackingUri.getPath(), rest), req.getQueryString(),
           null);