From d71bb8385bb8723b87239d0316f10deaa61fb1c0 Mon Sep 17 00:00:00 2001
From: Vinod Kumar Vavilapalli
Date: Thu, 19 Sep 2013 23:51:14 +0000
Subject: [PATCH] YARN-1203. Changed YARN web-app proxy to handle http and
 https URLs from AM registration and finish correctly. Contributed by Omkar
 Vinit Joshi.

MAPREDUCE-5515. Fixed MR AM's webapp to depend on a new config
mapreduce.ssl.enabled to enable https and disabling it by default as MR AM
needs to set up its own certificates etc and not depend on clusters'.
Contributed by Omkar Vinit Joshi.

svn merge --ignore-ancestry -c 1524864 ../../trunk/

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1524867 13f79535-47bb-0310-9956-ffa450edef68
---
 .../org/apache/hadoop/http/HttpConfig.java    |  4 +-
 hadoop-mapreduce-project/CHANGES.txt          |  5 ++
 .../hadoop/mapreduce/v2/app/MRAppMaster.java  | 16 ++++++
 .../v2/app/client/MRClientService.java        |  3 ++
 .../mapreduce/v2/app/rm/RMCommunicator.java   | 23 ++++----
 .../v2/app/webapp/AppController.java          |  6 ++-
 .../mapreduce/v2/app/webapp/JobBlock.java     |  2 +-
 .../mapreduce/v2/app/webapp/NavBlock.java     |  2 +-
 .../mapreduce/v2/app/webapp/TaskPage.java     |  4 +-
 .../mapreduce/v2/app/webapp/WebAppUtil.java   | 39 ++++++++++++++
 .../v2/app/webapp/dao/AMAttemptInfo.java      |  3 +-
 .../org/apache/hadoop/mapreduce/MRConfig.java |  5 ++
 .../src/main/resources/mapred-default.xml     | 14 +++++
 .../mapreduce/v2/hs/JobHistoryServer.java     |  6 +++
 .../mapreduce/v2/hs/webapp/HsJobBlock.java    |  3 +-
 .../mapreduce/v2/hs/webapp/HsTaskPage.java    |  3 +-
 hadoop-yarn-project/CHANGES.txt               |  3 ++
 .../FinishApplicationMasterRequest.java       |  4 ++
 .../RegisterApplicationMasterRequest.java     |  4 ++
 .../hadoop/yarn/conf/YarnConfiguration.java   |  5 +-
 .../yarn/server/webproxy/ProxyUriUtils.java   | 54 +++++++++++++++++--
 .../server/webproxy/WebAppProxyServlet.java   | 13 ++---
 22 files changed, 190 insertions(+), 31 deletions(-)
 create mode 100644 hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/WebAppUtil.java

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
index d906d9642a7..d9e219a332a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.http;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -38,8 +37,7 @@ public class HttpConfig {
         CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
   }
 
-  @VisibleForTesting
-  static void setSecure(boolean secure) {
+  public static void setSecure(boolean secure) {
     sslEnabled = secure;
   }
 
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 71607f47f63..e269b0ea99f 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -59,6 +59,11 @@ Release 2.2.0 - UNRELEASED
     MAPREDUCE-5488. Changed MR client to keep trying to reach the application
     when it sees that on attempt's AM is down. (Jian He via vinodkv)
 
+    MAPREDUCE-5515. Fixed MR AM's webapp to depend on a new config
+    mapreduce.ssl.enabled to enable https and disabling it by default as MR AM
+    needs to set up its own certificates etc and not depend on clusters'.
+    (Omkar Vinit Joshi via vinodkv)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
index cba38c2bff5..e2afdce6d13 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
@@ -36,14 +36,17 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.FileOutputCommitter;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.LocalContainerLauncher;
 import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
 import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -101,6 +104,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -1313,6 +1317,7 @@ public class MRAppMaster extends CompositeService {
           containerId.getApplicationAttemptId();
       long appSubmitTime = Long.parseLong(appSubmitTimeStr);
 
+
       MRAppMaster appMaster =
           new MRAppMaster(applicationAttemptId, containerId, nodeHostString,
               Integer.parseInt(nodePortString),
@@ -1322,6 +1327,17 @@ public class MRAppMaster extends CompositeService {
           new MRAppMasterShutdownHook(appMaster), SHUTDOWN_HOOK_PRIORITY);
       JobConf conf = new JobConf(new YarnConfiguration());
       conf.addResource(new Path(MRJobConfig.JOB_CONF_FILE));
+
+      // Explicitly disabling SSL for map reduce task as we can't allow MR users
+      // to gain access to keystore file for opening SSL listener. We can trust
+      // RM/NM to issue SSL certificates but definitely not MR-AM as it is
+      // running in user-land.
+      HttpConfig.setSecure(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
+          MRConfig.SSL_ENABLED_KEY_DEFAULT));
+      WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
+          CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+          CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
+
       String jobUserName = System
           .getenv(ApplicationConstants.Environment.USER.name());
       conf.set(MRJobConfig.USER_NAME, jobUserName);
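The MRAppMaster hunk above wires up two separate switches: mapreduce.ssl.enabled decides whether the AM's own web server comes up on https (off by default, because the job owner would have to supply and protect the keystore), while hadoop.ssl.enabled is only recorded so that links the AM renders to RM and NodeManager pages carry the right scheme. A minimal sketch of that split, for illustration only and not part of the patch (the class name is made up; the keys and calls are the ones used in the hunk above):

    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.http.HttpConfig;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.MRConfig;
    import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;

    public class AmSslFlagsSketch {                  // hypothetical class name
      public static void main(String[] args) {
        JobConf conf = new JobConf(new YarnConfiguration());

        // The AM's own webapp: https only if the job explicitly asks for it.
        HttpConfig.setSecure(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
            MRConfig.SSL_ENABLED_KEY_DEFAULT));

        // Links to RM/NM/JHS pages: follow the cluster-wide hadoop.ssl.enabled.
        WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
            CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
            CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));

        System.out.println("Scheme for YARN links: " + WebAppUtil.getSchemePrefix());
      }
    }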
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
index 181fd3740a9..1661b8ada1c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
@@ -27,8 +27,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
@@ -78,6 +80,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
 import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
index 7ce4d5d1b6c..cc047619c2c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
@@ -28,7 +28,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -36,12 +38,10 @@ import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
@@ -58,8 +58,6 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
-import com.sun.research.ws.wadl.Response;
-
 /**
  * Registers/unregisters to RM and sends heartbeats to RM.
  */
@@ -148,7 +146,13 @@ public abstract class RMCommunicator extends AbstractService
     if (serviceAddr != null) {
       request.setHost(serviceAddr.getHostName());
       request.setRpcPort(serviceAddr.getPort());
-      request.setTrackingUrl(serviceAddr.getHostName() + ":" + clientService.getHttpPort());
+      String scheme = "http://";
+      if (getConfig().getBoolean(MRConfig.SSL_ENABLED_KEY,
+          MRConfig.SSL_ENABLED_KEY_DEFAULT)) {
+        scheme = "https://";
+      }
+      request.setTrackingUrl(scheme + serviceAddr.getHostName() + ":"
+          + clientService.getHttpPort());
     }
     RegisterApplicationMasterResponse response =
       scheduler.registerApplicationMaster(request);
@@ -190,10 +194,11 @@ public abstract class RMCommunicator extends AbstractService
       }
       LOG.info("Setting job diagnostics to " + sb.toString());
 
-      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(),
-          context.getApplicationID());
+      String historyUrl =
+          WebAppUtil.getSchemePrefix() +
+          JobHistoryUtils.getHistoryUrl(getConfig(),
+              context.getApplicationID());
       LOG.info("History url is " + historyUrl);
-
       FinishApplicationMasterRequest request =
           FinishApplicationMasterRequest.newInstance(finishState,
             sb.toString(), historyUrl);
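With the RMCommunicator change above, the AM now registers a fully qualified tracking URL (scheme included) and hands back a scheme-prefixed history URL when it unregisters, which is what lets the YARN web-app proxy tell http AMs from https AMs. A rough sketch of the resulting URL shapes, for illustration only and not part of the patch; the class name, host names, port, and job id below are made up:

    import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;

    public class TrackingUrlSketch {                 // hypothetical class name
      public static void main(String[] args) {
        boolean amSsl = false;                       // mapreduce.ssl.enabled default
        String amHost = "node1.example.com";         // stands in for serviceAddr
        int amHttpPort = 42385;                      // stands in for clientService port

        String scheme = amSsl ? "https://" : "http://";
        String trackingUrl = scheme + amHost + ":" + amHttpPort;
        System.out.println(trackingUrl);             // http://node1.example.com:42385

        String historyUrl = WebAppUtil.getSchemePrefix()
            + "jhs.example.com:19888/jobhistory/job/job_1379634000000_0001";
        System.out.println(historyUrl);              // scheme follows hadoop.ssl.enabled
      }
    }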
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
index da537e5bc71..aff99531752 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.Controller;
 import org.apache.hadoop.yarn.webapp.View;
 
+import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 
 /**
@@ -50,6 +51,7 @@ import com.google.inject.Inject;
  */
 public class AppController extends Controller implements AMParams {
   private static final Log LOG = LogFactory.getLog(AppController.class);
+  private static final Joiner JOINER = Joiner.on("");
 
   protected final App app;
 
@@ -58,7 +60,9 @@ public class AppController extends Controller implements AMParams {
     super(ctx);
     this.app = app;
     set(APP_ID, app.context.getApplicationID().toString());
-    set(RM_WEB, YarnConfiguration.getRMWebAppURL(conf));
+    set(RM_WEB,
+        JOINER.join(WebAppUtil.getSchemePrefix(),
+            YarnConfiguration.getRMWebAppHostAndPort(conf)));
   }
 
   @Inject
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java
index 6b80c8c7d07..ccc5e7ead7a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java
@@ -104,7 +104,7 @@ public class JobBlock extends HtmlBlock {
         table.tr().
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(HttpConfig.getSchemePrefix(),
+          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
              attempt.getNodeHttpAddress()),
              attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getLogsLink()),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
index 686045568f1..a2b00e9672d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
@@ -63,7 +63,7 @@ public class NavBlock extends HtmlBlock {
             li().a(url("conf", jobid), "Configuration")._().
             li().a(url("tasks", jobid, "m"), "Map tasks")._().
             li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
-            li().a(".logslink", url(HttpConfig.getSchemePrefix(),
+            li().a(".logslink", url(WebAppUtil.getSchemePrefix(),
                 nodeHttpAddress, "node",
                 "containerlogs", thisAmInfo.getContainerId().toString(),
                 app.getJob().getUserName()),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
index 430117c4e25..26ef371f8a8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
@@ -86,12 +86,12 @@ public class TaskPage extends AppView {
       .append(ta.getState().toString()).append("\",\"")
 
       .append(nodeHttpAddr == null ? "N/A" :
-        "<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>"
+        "<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>"
         + nodeHttpAddr + "</a>")
       .append("\",\"")
 
       .append(ta.getAssignedContainerId() == null ? "N/A" :
-        "<a class='logslink' href='" + url(HttpConfig.getSchemePrefix(), nodeHttpAddr, "node"
+        "<a class='logslink' href='" + url(WebAppUtil.getSchemePrefix(), nodeHttpAddr, "node"
           , "containerlogs", ta.getAssignedContainerIdStr(), app.getJob()
           .getUserName()) + "'>logs</a>")
       .append("\",\"")
"N/A" : - "logs") .append("\",\"") diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/WebAppUtil.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/WebAppUtil.java new file mode 100644 index 00000000000..f2ae7daa0bb --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/WebAppUtil.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.mapreduce.v2.app.webapp; + + +public class WebAppUtil { + private static boolean isSSLEnabledInYARN; + + public static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) { + WebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN; + } + + public static boolean isSSLEnabledInYARN() { + return isSSLEnabledInYARN; + } + + public static String getSchemePrefix() { + if (isSSLEnabledInYARN) { + return "https://"; + } else { + return "http://"; + } + } +} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java index dd2e90a9001..a139b5176c2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java @@ -26,6 +26,7 @@ import javax.xml.bind.annotation.XmlRootElement; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; +import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; @@ -63,7 +64,7 @@ public class AMAttemptInfo { ContainerId containerId = amInfo.getContainerId(); if (containerId != null) { this.containerId = containerId.toString(); - this.logsLink = join(HttpConfig.getSchemePrefix() + nodeHttpAddress, + this.logsLink = join(WebAppUtil.getSchemePrefix() + nodeHttpAddress, ujoin("node", "containerlogs", this.containerId, user)); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java
index dd2e90a9001..a139b5176c2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java
@@ -26,6 +26,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 
@@ -63,7 +64,7 @@ public class AMAttemptInfo {
     ContainerId containerId = amInfo.getContainerId();
     if (containerId != null) {
       this.containerId = containerId.toString();
-      this.logsLink = join(HttpConfig.getSchemePrefix() + nodeHttpAddress,
+      this.logsLink = join(WebAppUtil.getSchemePrefix() + nodeHttpAddress,
           ujoin("node", "containerlogs", this.containerId, user));
     }
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
index bbac5fcab9d..879f70d0983 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
@@ -84,6 +84,11 @@ public interface MRConfig {
     "mapreduce.shuffle.ssl.enabled";
 
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
+
+  public static final String SSL_ENABLED_KEY =
+      "mapreduce.ssl.enabled";
+
+  public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
 
   public static final String SHUFFLE_CONSUMER_PLUGIN =
     "mapreduce.job.reduce.shuffle.consumer.plugin.class";
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
index 30e97762425..0fff2815a2b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
@@ -545,6 +545,20 @@
   </description>
 </property>
 
+<property>
+  <name>mapreduce.ssl.enabled</name>
+  <value>false</value>
+  <description>
+    If enabled, MapReduce application master's http server will be
+    started with SSL enabled. Map reduce AM by default doesn't support SSL.
+    If MapReduce jobs want SSL support, it is the user's responsibility to
+    create and manage certificates, keystores and trust-stores with appropriate
+    permissions. This is only for MapReduce application master and is not used
+    by job history server. To enable encrypted shuffle this property is not
+    required, instead refer to (mapreduce.shuffle.ssl.enabled) property.
+  </description>
+</property>
+
 <property>
   <name>mapreduce.shuffle.ssl.file.buffer.size</name>
   <value>65536</value>
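The new property defaults to false, so a job that wants an https AM webapp has to enable it explicitly and bring its own keystore and truststore. A sketch of doing that from a job driver, for illustration only and not part of the patch; the class and job names are made up:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class SslAmJobSketch {                    // hypothetical class name
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Opt this job's AM webapp into https (MRConfig.SSL_ENABLED_KEY).
        // Provisioning and protecting the keystore/truststore is the job
        // owner's responsibility.
        conf.setBoolean("mapreduce.ssl.enabled", true);

        Job job = Job.getInstance(conf, "ssl-am-example");  // made-up job name
        // ... set mapper/reducer/input/output as usual before job.submit()
      }
    }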
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
index a5e2f1794a0..0e610d34e89 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
@@ -24,8 +24,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.hs.server.HSAdminServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -73,6 +75,10 @@ public class JobHistoryServer extends CompositeService {
     config.setBoolean(Dispatcher.DISPATCHER_EXIT_ON_ERROR_KEY, true);
 
+    // This is required for WebApps to use https if enabled.
+    WebAppUtil.setSSLEnabledInYARN(conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
index 25b22f0d2aa..5bc44d19fe2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -132,7 +133,7 @@ public class HsJobBlock extends HtmlBlock {
         table.tr((odd = !odd) ? _ODD : _EVEN).
           td(String.valueOf(attempt.getAttemptId())).
           td(new Date(attempt.getStartTime()).toString()).
-          td().a(".nodelink", url(HttpConfig.getSchemePrefix(),
+          td().a(".nodelink", url(WebAppUtil.getSchemePrefix(),
              attempt.getNodeHttpAddress()),
              attempt.getNodeHttpAddress())._().
           td().a(".logslink", url(attempt.getShortLogsLink()),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
index f01ddc31ea9..ba8b68fec2d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.util.Times;
@@ -148,7 +149,7 @@ public class HsTaskPage extends HsView {
       .append(sortId + " ").append(taid).append("\",\"")
       .append(ta.getState().toString()).append("\",\"")
-      .append("<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>")
+      .append("<a class='nodelink' href='" + WebAppUtil.getSchemePrefix() + nodeHttpAddr + "'>")
       .append(nodeRackName + "/" + nodeHttpAddr + "\",\"")
      .append("
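The history-server pages above build node and container-log links the same way the AM-side pages do: WebAppUtil's scheme prefix, then the NodeManager web address, then the node/containerlogs/<containerId>/<user> path. A sketch of the resulting link, for illustration only and not part of the patch; the class name and all values below are made up:

    import org.apache.hadoop.mapreduce.v2.app.webapp.WebAppUtil;

    public class LogLinkSketch {                     // hypothetical class name
      public static void main(String[] args) {
        WebAppUtil.setSSLEnabledInYARN(false);

        String nodeHttpAddress = "nm-host.example.com:8042";            // made up
        String containerId = "container_1379634000000_0001_01_000001";  // made up
        String user = "alice";                                          // made up

        String logsLink = WebAppUtil.getSchemePrefix() + nodeHttpAddress
            + "/node/containerlogs/" + containerId + "/" + user;
        System.out.println(logsLink);
        // -> http://nm-host.example.com:8042/node/containerlogs/container_.../alice
      }
    }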