MAPREDUCE-5536. Fixed MR AM and JHS to respect mapreduce.jobhistory.webapp.https.address. Contributed by Omkar Vinit Joshi.

svn merge --ignore-ancestry -c 1528251 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1528253 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2013-10-01 22:54:35 +00:00
parent 2c329793c6
commit 6daca6a93d
22 changed files with 311 additions and 211 deletions
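In short: the AM and the JobHistoryServer previously keyed their web-address decisions off the global hadoop.ssl.enabled / HttpConfig state, so mapreduce.jobhistory.webapp.https.address was ignored. This change introduces a JHS-specific SSL flag, renames the AM flag, and centralizes scheme and address selection in a new MRWebAppUtil class. A minimal sketch of the knobs involved (key names are taken from the diffs below; hosts and ports are illustrative only):

import org.apache.hadoop.conf.Configuration;

public class JhsHttpsKnobsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // New flag: serve the JobHistoryServer web UI over HTTPS.
    conf.setBoolean("mapreduce.jobhistory.ssl.enabled", true);
    // With the flag on, this address is now respected by the AM and JHS.
    conf.set("mapreduce.jobhistory.webapp.https.address", "jhs.example.com:19890");
    // AM web app flag, renamed in this commit from mapreduce.ssl.enabled.
    conf.setBoolean("mapreduce.am.ssl.enabled", false);
  }
}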

CHANGES.txt

@@ -120,6 +120,9 @@ Release 2.1.2 - UNRELEASED
     MAPREDUCE-5544. JobClient#getJob loads job conf twice. (Sandy Ryza)
 
+    MAPREDUCE-5536. Fixed MR AM and JHS to respect
+    mapreduce.jobhistory.webapp.https.address. (Omkar Vinit Joshi via vinodkv)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES

MRAppMaster.java

@@ -105,10 +105,11 @@ import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;

@@ -1349,12 +1350,8 @@ public class MRAppMaster extends CompositeService {
     // to gain access to keystore file for opening SSL listener. We can trust
     // RM/NM to issue SSL certificates but definitely not MR-AM as it is
     // running in user-land.
-    HttpConfig.setSecure(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
-        MRConfig.SSL_ENABLED_KEY_DEFAULT));
-    WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
+    MRWebAppUtil.initialize(conf);
+    HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInMRAM());
     String jobUserName = System
         .getenv(ApplicationConstants.Environment.USER.name());
     conf.set(MRJobConfig.USER_NAME, jobUserName);

MRClientService.java

@@ -27,10 +27,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.JobACL;
-import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;

@@ -80,7 +78,6 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
 import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;

RMCommunicator.java

@@ -28,7 +28,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;

@@ -39,8 +38,7 @@ import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;

@@ -146,13 +144,9 @@ public abstract class RMCommunicator extends AbstractService
       if (serviceAddr != null) {
         request.setHost(serviceAddr.getHostName());
         request.setRpcPort(serviceAddr.getPort());
-        String scheme = "http://";
-        if (getConfig().getBoolean(MRConfig.SSL_ENABLED_KEY,
-            MRConfig.SSL_ENABLED_KEY_DEFAULT)) {
-          scheme = "https://";
-        }
-        request.setTrackingUrl(scheme + serviceAddr.getHostName() + ":"
-            + clientService.getHttpPort());
+        request.setTrackingUrl(MRWebAppUtil
+            .getAMWebappScheme(getConfig())
+            + serviceAddr.getHostName() + ":" + clientService.getHttpPort());
       }
       RegisterApplicationMasterResponse response =
         scheduler.registerApplicationMaster(request);

@@ -195,9 +189,8 @@ public abstract class RMCommunicator extends AbstractService
     LOG.info("Setting job diagnostics to " + sb.toString());
 
     String historyUrl =
-        WebAppUtil.getSchemePrefix()
-            + JobHistoryUtils.getHistoryUrl(getConfig(),
-                context.getApplicationID());
+        MRWebAppUtil.getApplicationWebURLOnJHSWithScheme(getConfig(),
+            context.getApplicationID());
     LOG.info("History url is " + historyUrl);
 
     FinishApplicationMasterRequest request =
         FinishApplicationMasterRequest.newInstance(finishState,

AppController.java

@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.util.StringHelper;
 import org.apache.hadoop.yarn.util.Times;

@@ -61,7 +62,7 @@ public class AppController extends Controller implements AMParams {
     this.app = app;
     set(APP_ID, app.context.getApplicationID().toString());
     set(RM_WEB,
-        JOINER.join(WebAppUtil.getSchemePrefix(),
+        JOINER.join(MRWebAppUtil.getYARNWebappScheme(),
             WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf)));
   }

JobBlock.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;

@@ -104,7 +105,7 @@ public class JobBlock extends HtmlBlock {
         table.tr().
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
+          td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(),
              attempt.getNodeHttpAddress()),
              attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getLogsLink()),

NavBlock.java

@@ -18,19 +18,19 @@
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.RM_WEB;
+
 import java.util.List;
 
-import com.google.inject.Inject;
-
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
+import com.google.inject.Inject;
+
 public class NavBlock extends HtmlBlock {
   final App app;

@@ -63,7 +63,7 @@ public class NavBlock extends HtmlBlock {
             li().a(url("conf", jobid), "Configuration")._().
             li().a(url("tasks", jobid, "m"), "Map tasks")._().
             li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
-            li().a(".logslink", url(WebAppUtil.getSchemePrefix(),
+            li().a(".logslink", url(MRWebAppUtil.getYARNWebappScheme(),
                 nodeHttpAddress, "node",
                 "containerlogs", thisAmInfo.getContainerId().toString(),
                 app.getJob().getUserName()),

TaskPage.java

@@ -28,14 +28,14 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 import java.util.Collection;
 
 import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
+import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
 import com.google.inject.Inject;

@@ -86,12 +86,12 @@ public class TaskPage extends AppView {
       .append(ta.getState().toString()).append("\",\"")
       .append(nodeHttpAddr == null ? "N/A" :
-        "<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>"
+        "<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>"
         + nodeHttpAddr + "</a>")
       .append("\",\"")
       .append(ta.getAssignedContainerId() == null ? "N/A" :
-        "<a class='logslink' href='" + url(WebAppUtil.getSchemePrefix(), nodeHttpAddr, "node"
+        "<a class='logslink' href='" + url(MRWebAppUtil.getYARNWebappScheme(), nodeHttpAddr, "node"
         , "containerlogs", ta.getAssignedContainerIdStr(), app.getJob()
         .getUserName()) + "'>logs</a>")
       .append("\",\"")

WebAppUtil.java (file deleted)

@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapreduce.v2.app.webapp;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
-
-public class WebAppUtil {
-  private static boolean isSSLEnabledInYARN;
-
-  public static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
-    WebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
-  }
-
-  public static boolean isSSLEnabledInYARN() {
-    return isSSLEnabledInYARN;
-  }
-
-  public static String getSchemePrefix() {
-    if (isSSLEnabledInYARN) {
-      return "https://";
-    } else {
-      return "http://";
-    }
-  }
-
-  public static void setJHSWebAppURLWithoutScheme(Configuration conf,
-      String hostAddress) {
-    if (HttpConfig.isSecure()) {
-      conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, hostAddress);
-    } else {
-      conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, hostAddress);
-    }
-  }
-
-  public static String getJHSWebAppURLWithoutScheme(Configuration conf) {
-    if (HttpConfig.isSecure()) {
-      return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS);
-    } else {
-      return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS);
-    }
-  }
-}

AMAttemptInfo.java

@@ -24,9 +24,8 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;

@@ -64,7 +63,7 @@ public class AMAttemptInfo {
     ContainerId containerId = amInfo.getContainerId();
     if (containerId != null) {
       this.containerId = containerId.toString();
-      this.logsLink = join(WebAppUtil.getSchemePrefix() + nodeHttpAddress,
+      this.logsLink = join(MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddress,
           ujoin("node", "containerlogs", this.containerId, user));
     }
   }

JHAdminConfig.java

@@ -25,7 +25,6 @@ import java.net.UnknownHostException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 
 /**

@@ -130,6 +129,11 @@ public class JHAdminConfig {
   public static final String MR_HISTORY_PRINCIPAL =
     MR_HISTORY_PREFIX + "principal";
 
+  /** To enable SSL in MR history server */
+  public static final String MR_HS_SSL_ENABLED = MR_HISTORY_PREFIX
+      + "ssl.enabled";
+  public static boolean DEFAULT_MR_HS_SSL_ENABLED = false;
+
   /**The address the history server webapp is on.*/
   public static final String MR_HISTORY_WEBAPP_ADDRESS =
     MR_HISTORY_PREFIX + "webapp.address";

@@ -188,43 +192,11 @@ public class JHAdminConfig {
   /** Whether to use fixed ports with the minicluster. */
   public static final String MR_HISTORY_MINICLUSTER_FIXED_PORTS = MR_HISTORY_PREFIX
       + "minicluster.fixed.ports";
 
   /**
    * Default is false to be able to run tests concurrently without port
    * conflicts.
    */
   public static boolean DEFAULT_MR_HISTORY_MINICLUSTER_FIXED_PORTS = false;
-
-  public static String getResolvedMRHistoryWebAppURLWithoutScheme(
-      Configuration conf) {
-    InetSocketAddress address = null;
-    if (HttpConfig.isSecure()) {
-      address =
-          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
-    } else {
-      address =
-          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
-              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
-    }
-    address = NetUtils.getConnectAddress(address);
-    StringBuffer sb = new StringBuffer();
-    InetAddress resolved = address.getAddress();
-    if (resolved == null || resolved.isAnyLocalAddress() ||
-        resolved.isLoopbackAddress()) {
-      String lh = address.getHostName();
-      try {
-        lh = InetAddress.getLocalHost().getCanonicalHostName();
-      } catch (UnknownHostException e) {
-        //Ignore and fallback.
-      }
-      sb.append(lh);
-    } else {
-      sb.append(address.getHostName());
-    }
-    sb.append(":").append(address.getPort());
-    return sb.toString();
-  }
 }

JobHistoryUtils.java

@@ -20,11 +20,7 @@ package org.apache.hadoop.mapreduce.v2.jobhistory;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
 import java.util.Calendar;
-import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;

@@ -45,13 +41,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable

@@ -126,9 +117,6 @@ public class JobHistoryUtils {
   public static final Pattern TIMESTAMP_DIR_PATTERN = Pattern.compile(TIMESTAMP_DIR_REGEX);
   private static final String TIMESTAMP_DIR_FORMAT = "%04d" + File.separator + "%02d" + File.separator + "%02d";
-  private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
-  private static final Joiner JOINER = Joiner.on("");
 
   private static final PathFilter CONF_FILTER = new PathFilter() {
     @Override
     public boolean accept(Path path) {

@@ -497,36 +485,6 @@ public class JobHistoryUtils {
     return result;
   }
 
-  public static String getHistoryUrl(Configuration conf, ApplicationId appId)
-      throws UnknownHostException {
-    //construct the history url for job
-    String addr = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
-    Iterator<String> it = ADDR_SPLITTER.split(addr).iterator();
-    it.next(); // ignore the bind host
-    String port = it.next();
-    // Use hs address to figure out the host for webapp
-    addr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
-    String host = ADDR_SPLITTER.split(addr).iterator().next();
-    String hsAddress = JOINER.join(host, ":", port);
-    InetSocketAddress address = NetUtils.createSocketAddr(
-        hsAddress, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
-    StringBuffer sb = new StringBuffer();
-    if (address.getAddress().isAnyLocalAddress() ||
-        address.getAddress().isLoopbackAddress()) {
-      sb.append(InetAddress.getLocalHost().getCanonicalHostName());
-    } else {
-      sb.append(address.getHostName());
-    }
-    sb.append(":").append(address.getPort());
-    sb.append("/jobhistory/job/");
-    JobID jobId = TypeConverter.fromYarn(appId);
-    sb.append(jobId.toString());
-    return sb.toString();
-  }
-
   public static Path getPreviousJobHistoryPath(
       Configuration conf, ApplicationAttemptId applicationAttemptId)
       throws IOException {

MRWebAppUtil.java (new file)

@@ -0,0 +1,193 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.util;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.Iterator;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.TypeConverter;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+
+@Private
+@Evolving
+public class MRWebAppUtil {
+  private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
+  private static final Joiner JOINER = Joiner.on("");
+
+  private static boolean isSSLEnabledInYARN;
+  private static boolean isSSLEnabledInJHS;
+  private static boolean isSSLEnabledInMRAM;
+
+  public static void initialize(Configuration conf) {
+    setSSLEnabledInYARN(conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
+    setSSLEnabledInJHS(conf.getBoolean(JHAdminConfig.MR_HS_SSL_ENABLED,
+        JHAdminConfig.DEFAULT_MR_HS_SSL_ENABLED));
+    setSSLEnabledInMRAM(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
+        MRConfig.SSL_ENABLED_KEY_DEFAULT));
+  }
+
+  private static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
+    MRWebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
+  }
+
+  private static void setSSLEnabledInJHS(boolean isSSLEnabledInJHS) {
+    MRWebAppUtil.isSSLEnabledInJHS = isSSLEnabledInJHS;
+  }
+
+  private static void setSSLEnabledInMRAM(boolean isSSLEnabledInMRAM) {
+    MRWebAppUtil.isSSLEnabledInMRAM = isSSLEnabledInMRAM;
+  }
+
+  public static boolean isSSLEnabledInYARN() {
+    return isSSLEnabledInYARN;
+  }
+
+  public static boolean isSSLEnabledInJHS() {
+    return isSSLEnabledInJHS;
+  }
+
+  public static boolean isSSLEnabledInMRAM() {
+    return isSSLEnabledInMRAM;
+  }
+
+  public static String getYARNWebappScheme() {
+    if (isSSLEnabledInYARN) {
+      return "https://";
+    } else {
+      return "http://";
+    }
+  }
+
+  public static String getJHSWebappScheme() {
+    if (isSSLEnabledInJHS) {
+      return "https://";
+    } else {
+      return "http://";
+    }
+  }
+
+  public static void setJHSWebappURLWithoutScheme(Configuration conf,
+      String hostAddress) {
+    if (isSSLEnabledInJHS) {
+      conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, hostAddress);
+    } else {
+      conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, hostAddress);
+    }
+  }
+
+  public static String getJHSWebappURLWithoutScheme(Configuration conf) {
+    if (isSSLEnabledInJHS) {
+      return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS);
+    } else {
+      return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
+    }
+  }
+
+  public static String getJHSWebappURLWithScheme(Configuration conf) {
+    return getJHSWebappScheme() + getJHSWebappURLWithoutScheme(conf);
+  }
+
+  public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
+    if (isSSLEnabledInJHS) {
+      return conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
+    } else {
+      return conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
+          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
+    }
+  }
+
+  public static String getApplicationWebURLOnJHSWithoutScheme(
+      Configuration conf, ApplicationId appId) throws UnknownHostException {
+    //construct the history url for job
+    String addr = getJHSWebappURLWithoutScheme(conf);
+    Iterator<String> it = ADDR_SPLITTER.split(addr).iterator();
+    it.next(); // ignore the bind host
+    String port = it.next();
+    // Use hs address to figure out the host for webapp
+    addr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
+        JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
+    String host = ADDR_SPLITTER.split(addr).iterator().next();
+    String hsAddress = JOINER.join(host, ":", port);
+    InetSocketAddress address = NetUtils.createSocketAddr(
+        hsAddress, getDefaultJHSWebappPort(),
+        getDefaultJHSWebappURLWithoutScheme());
+    StringBuffer sb = new StringBuffer();
+    if (address.getAddress().isAnyLocalAddress() ||
+        address.getAddress().isLoopbackAddress()) {
+      sb.append(InetAddress.getLocalHost().getCanonicalHostName());
+    } else {
+      sb.append(address.getHostName());
+    }
+    sb.append(":").append(address.getPort());
+    sb.append("/jobhistory/job/");
+    JobID jobId = TypeConverter.fromYarn(appId);
+    sb.append(jobId.toString());
+    return sb.toString();
+  }
+
+  public static String getApplicationWebURLOnJHSWithScheme(Configuration conf,
+      ApplicationId appId) throws UnknownHostException {
+    return getJHSWebappScheme()
+        + getApplicationWebURLOnJHSWithoutScheme(conf, appId);
+  }
+
+  private static int getDefaultJHSWebappPort() {
+    if (isSSLEnabledInJHS) {
+      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT;
+    } else {
+      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
+    }
+  }
+
+  private static String getDefaultJHSWebappURLWithoutScheme() {
+    if (isSSLEnabledInJHS) {
+      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS;
+    } else {
+      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
+    }
+  }
+
+  public static String getAMWebappScheme(Configuration conf) {
+    if (isSSLEnabledInMRAM) {
+      return "https://";
+    } else {
+      return "http://";
+    }
+  }
+}
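As a usage reference, a minimal caller of the new utility might look like this (a sketch only, not part of the commit; the MRWebAppUtil methods are the ones added above, the application ID values are made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class MRWebAppUtilSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    MRWebAppUtil.initialize(conf); // caches the three SSL flags from conf
    // Scheme + host:port of the JHS web app; honors the https address when
    // mapreduce.jobhistory.ssl.enabled is true.
    String jhs = MRWebAppUtil.getJHSWebappURLWithScheme(conf);
    // Full per-application history URL:
    // <scheme><host>:<port>/jobhistory/job/job_<timestamp>_<id>
    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
    String jobUrl = MRWebAppUtil.getApplicationWebURLOnJHSWithScheme(conf, appId);
    System.out.println(jhs + " / " + jobUrl);
  }
}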

MRConfig.java

@@ -86,7 +86,7 @@ public interface MRConfig {
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
 
   public static final String SSL_ENABLED_KEY =
-      "mapreduce.ssl.enabled";
+      "mapreduce.am.ssl.enabled";
 
   public static final boolean SSL_ENABLED_KEY_DEFAULT = false;

mapred-default.xml

@@ -546,7 +546,7 @@
 </property>
 
 <property>
-  <name>mapreduce.ssl.enabled</name>
+  <name>mapreduce.am.ssl.enabled</name>
   <value>false</value>
   <description>
     If enabled, MapReduce application master's http server will be

@@ -1653,4 +1653,12 @@
     storage class.</description>
 </property>
 
+<property>
+  <name>mapreduce.jobhistory.ssl.enabled</name>
+  <value>false</value>
+  <description>
+    Whether to use SSL for the HTTP endpoints. If set to true, the
+    JobHistoryServer web UIs will be served over HTTPS instead of HTTP.
+  </description>
+</property>
+
 </configuration>

MapReduceTrackingUriPlugin.java

@@ -24,7 +24,7 @@ import java.net.URISyntaxException;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.util.TrackingUriPlugin;

@@ -54,8 +54,7 @@ public class MapReduceTrackingUriPlugin extends TrackingUriPlugin implements
   public URI getTrackingUri(ApplicationId id) throws URISyntaxException {
     String jobSuffix = id.toString().replaceFirst("^application_", "job_");
     String historyServerAddress =
-        this.getConf().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS);
-    return new URI("http://" + historyServerAddress + "/jobhistory/job/"
-        + jobSuffix);
+        MRWebAppUtil.getJHSWebappURLWithScheme(getConf());
     return new URI(historyServerAddress + "/jobhistory/job/"+ jobSuffix);
   }
 }
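A quick illustration of the behavioral change: the tracking URI now carries the scheme dictated by the JHS SSL flag instead of a hard-coded "http://". A rough sketch (hedged: the host, port, and application ID are illustrative; the plugin's package is assumed to be org.apache.hadoop.mapred per the imports above):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.MapReduceTrackingUriPlugin;
import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class TrackingUriSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean("mapreduce.jobhistory.ssl.enabled", true);
    conf.set("mapreduce.jobhistory.webapp.https.address",
        "jhs.example.com:19890"); // illustrative value
    MRWebAppUtil.initialize(conf); // caches the JHS SSL flag read above

    MapReduceTrackingUriPlugin plugin = new MapReduceTrackingUriPlugin();
    plugin.setConf(conf);
    ApplicationId appId = ApplicationId.newInstance(1381618475000L, 7);
    URI uri = plugin.getTrackingUri(appId);
    // Expected shape: https://jhs.example.com:19890/jobhistory/job/job_1381618475000_0007
    System.out.println(uri);
  }
}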

CompletedJob.java

@@ -58,6 +58,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;

@@ -142,7 +143,9 @@ public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job
     report.setJobFile(getConfFile().toString());
     String historyUrl = "N/A";
     try {
-      historyUrl = JobHistoryUtils.getHistoryUrl(conf, jobId.getAppId());
+      historyUrl =
+          MRWebAppUtil.getApplicationWebURLOnJHSWithoutScheme(conf,
+              jobId.getAppId());
     } catch (UnknownHostException e) {
       //Ignore.
     }

HistoryClientService.java

@@ -75,12 +75,12 @@ import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.HsWebApp;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;

@@ -144,10 +144,7 @@ public class HistoryClientService extends AbstractService {
   private void initializeWebApp(Configuration conf) {
     webApp = new HsWebApp(history);
-    InetSocketAddress bindAddress = conf.getSocketAddr(
-        JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
-        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
+    InetSocketAddress bindAddress = MRWebAppUtil.getJHSWebBindAddress(conf);
     // NOTE: there should be a .at(InetSocketAddress)
     WebApps
         .$for("jobhistory", HistoryClientService.class, this, "ws")

@@ -157,8 +154,9 @@ public class HistoryClientService extends AbstractService {
         .withHttpSpnegoPrincipalKey(
             JHAdminConfig.MR_WEBAPP_SPNEGO_USER_NAME_KEY)
         .at(NetUtils.getHostPortString(bindAddress)).start(webApp);
-    conf.updateConnectAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-        webApp.getListenerAddress());
+
+    MRWebAppUtil.setJHSWebappURLWithoutScheme(conf,
+        NetUtils.getHostPortString(webApp.getListenerAddress()));
   }
 
   @Override

JobHistoryServer.java

@@ -25,12 +25,13 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryServerStateStoreService.HistoryServerState;
 import org.apache.hadoop.mapreduce.v2.hs.server.HSAdminServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.security.SecurityUtil;

@@ -118,9 +119,8 @@ public class JobHistoryServer extends CompositeService {
     config.setBoolean(Dispatcher.DISPATCHER_EXIT_ON_ERROR_KEY, true);
 
     // This is required for WebApps to use https if enabled.
-    WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
+    MRWebAppUtil.initialize(getConfig());
+    HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInJHS());
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {

HsJobBlock.java

@@ -18,21 +18,25 @@
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
 
 import java.util.Date;
 import java.util.List;
 
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.ResponseInfo;

@@ -41,8 +45,8 @@ import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+
+import com.google.inject.Inject;
 
 /**
  * Render a block of HTML for a give job.

@@ -133,7 +137,7 @@ public class HsJobBlock extends HtmlBlock {
         table.tr((odd = !odd) ? _ODD : _EVEN).
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
+          td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(),
              attempt.getNodeHttpAddress()),
              attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getShortLogsLink()),

HsTaskPage.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_ID;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;

@@ -30,20 +30,17 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 import java.util.Collection;
 
 import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TD;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TFOOT;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.THEAD;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;

@@ -149,7 +146,7 @@ public class HsTaskPage extends HsView {
       .append(sortId + " ").append(taid).append("\",\"")
       .append(ta.getState().toString()).append("\",\"")
-      .append("<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>")
+      .append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
       .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
       .append("<a class='logslink' href='").append(url("logs", nodeIdString

MiniMRYarnCluster.java

@@ -20,6 +20,9 @@ package org.apache.hadoop.mapreduce.v2;
 
 import java.io.File;
 import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

@@ -33,10 +36,11 @@ import org.apache.hadoop.mapred.LocalContainerLauncher;
 import org.apache.hadoop.mapred.ShuffleHandler;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.Service;
 import org.apache.hadoop.util.JarFinder;

@@ -46,7 +50,6 @@ import org.apache.hadoop.yarn.server.MiniYARNCluster;
 import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
 import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-import org.hamcrest.core.IsEqual;
 
 /**
  * Configures and starts the MR-specific components in the YARN cluster.

@@ -71,6 +74,38 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
     addService(historyServerWrapper);
   }
 
+  public static String getResolvedMRHistoryWebAppURLWithoutScheme(
+      Configuration conf, boolean isSSLEnabled) {
+    InetSocketAddress address = null;
+    if (isSSLEnabled) {
+      address =
+          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
+    } else {
+      address =
+          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
+    }
+    address = NetUtils.getConnectAddress(address);
+    StringBuffer sb = new StringBuffer();
+    InetAddress resolved = address.getAddress();
+    if (resolved == null || resolved.isAnyLocalAddress() ||
+        resolved.isLoopbackAddress()) {
+      String lh = address.getHostName();
+      try {
+        lh = InetAddress.getLocalHost().getCanonicalHostName();
+      } catch (UnknownHostException e) {
+        //Ignore and fallback.
+      }
+      sb.append(lh);
+    } else {
+      sb.append(address.getHostName());
+    }
+    sb.append(":").append(address.getPort());
+    return sb.toString();
+  }
+
   @Override
   public void serviceInit(Configuration conf) throws Exception {
     conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);

@@ -159,7 +194,8 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
         // pick free random ports.
         getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
             hostname + ":0");
-        WebAppUtil.setJHSWebAppURLWithoutScheme(getConfig(), hostname + ":0");
+        MRWebAppUtil.setJHSWebappURLWithoutScheme(getConfig(), hostname
+            + ":0");
         getConfig().set(JHAdminConfig.JHS_ADMIN_ADDRESS,
             hostname + ":0");
       }

@@ -185,8 +221,8 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
       //need to do this because historyServer.init creates a new Configuration
       getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
           historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
-      WebAppUtil.setJHSWebAppURLWithoutScheme(getConfig(),
-          WebAppUtil.getJHSWebAppURLWithoutScheme(historyServer.getConfig()));
+      MRWebAppUtil.setJHSWebappURLWithoutScheme(getConfig(),
+          MRWebAppUtil.getJHSWebappURLWithoutScheme(historyServer.getConfig()));
 
       LOG.info("MiniMRYARN ResourceManager address: " +
           getConfig().get(YarnConfiguration.RM_ADDRESS));

@@ -195,7 +231,8 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
       LOG.info("MiniMRYARN HistoryServer address: " +
           getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
       LOG.info("MiniMRYARN HistoryServer web address: " +
-          JHAdminConfig.getResolvedMRHistoryWebAppURLWithoutScheme(getConfig()));
+          getResolvedMRHistoryWebAppURLWithoutScheme(getConfig(),
+              HttpConfig.isSecure()));
     }
 
     @Override
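For completeness, a sketch of how a test might use the relocated helper (assumptions: cluster startup details are elided, and the single-argument MiniMRYarnCluster constructor is used as in existing tests):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;

public class MiniClusterWebAddressSketch {
  public static void main(String[] args) {
    MiniMRYarnCluster cluster = new MiniMRYarnCluster("sketch");
    cluster.init(new Configuration());
    cluster.start();
    // The helper now lives on MiniMRYarnCluster rather than JHAdminConfig,
    // and takes the SSL decision as an explicit argument.
    String jhsWeb = MiniMRYarnCluster.getResolvedMRHistoryWebAppURLWithoutScheme(
        cluster.getConfig(), HttpConfig.isSecure());
    System.out.println("JHS web address: " + jhsWeb);
    cluster.stop();
  }
}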