YARN-1277. Added a policy based configuration for http/https in common HttpServer and using the same in YARN - related to per project https config support via HADOOP-10022. Contributed by Suresh Srinivas and Omkar Vinit Joshi.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1529662 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2013-10-06 18:43:36 +00:00
parent 7317e97bd7
commit 21181b6553
17 changed files with 160 additions and 179 deletions

CommonConfigurationKeysPublic.java

@@ -264,5 +264,9 @@ public class CommonConfigurationKeysPublic {
   /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
   public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
       60;
+
+  // HTTP policies to be used in configuration
+  public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY";
+  public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY";
 }

HttpConfig.java

@@ -28,25 +28,41 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class HttpConfig {
-  private static boolean sslEnabled;
+  private static Policy policy;
+
+  public enum Policy {
+    HTTP_ONLY,
+    HTTPS_ONLY;
+
+    public static Policy fromString(String value) {
+      if (value.equalsIgnoreCase(CommonConfigurationKeysPublic
+          .HTTP_POLICY_HTTPS_ONLY)) {
+        return HTTPS_ONLY;
+      }
+      return HTTP_ONLY;
+    }
+  }
 
   static {
     Configuration conf = new Configuration();
-    sslEnabled = conf.getBoolean(
+    boolean sslEnabled = conf.getBoolean(
         CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
         CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
+    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
   }
 
-  public static void setSecure(boolean secure) {
-    sslEnabled = secure;
+  public static void setPolicy(Policy policy) {
+    HttpConfig.policy = policy;
   }
 
   public static boolean isSecure() {
-    return sslEnabled;
+    return policy == Policy.HTTPS_ONLY;
   }
 
   public static String getSchemePrefix() {
     return (isSecure()) ? "https://" : "http://";
   }
+
+  public static String getScheme(Policy policy) {
+    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
+  }
 }
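
The enum replaces the old boolean hadoop.ssl.enabled switch with named policies. As an illustrative sketch (not part of the commit), this is how a caller would turn a configured policy string into a URL scheme with the new API; the key name yarn.http.policy is the one introduced later in this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;

public class HttpPolicyExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Any unrecognized value falls back to HTTP_ONLY, since fromString()
    // only matches "HTTPS_ONLY" (case-insensitively).
    HttpConfig.Policy policy = HttpConfig.Policy.fromString(
        conf.get("yarn.http.policy", "HTTP_ONLY"));
    HttpConfig.setPolicy(policy);
    System.out.println(HttpConfig.getScheme(policy)); // "http://" or "https://"
  }
}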

TestSSLHttpServer.java

@@ -54,7 +54,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
   @Before
   public void setup() throws Exception {
-    HttpConfig.setSecure(true);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
     File base = new File(BASEDIR);
     FileUtil.fullyDelete(base);
     base.mkdirs();
@@ -89,7 +89,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
     String classpathDir =
         KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
     new File(classpathDir, CONFIG_SITE_XML).delete();
-    HttpConfig.setSecure(false);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
   }

MRAppMaster.java

@@ -18,60 +18,29 @@
 package org.apache.hadoop.mapreduce.v2.app;
 
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicBoolean;
-
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.mapred.FileOutputCommitter;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.LocalContainerLauncher;
-import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
-import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
-import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TypeConverter;
-import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent;
-import org.apache.hadoop.mapreduce.jobhistory.EventReader;
-import org.apache.hadoop.mapreduce.jobhistory.EventType;
-import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryCopyService;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.*;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.*;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
@@ -82,30 +51,17 @@ import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobStartEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
+import org.apache.hadoop.mapreduce.v2.app.job.event.*;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl;
 import org.apache.hadoop.mapreduce.v2.app.local.LocalContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
-import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
-import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
+import org.apache.hadoop.mapreduce.v2.app.rm.*;
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -139,7 +95,14 @@ import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.SystemClock;
 
-import com.google.common.annotations.VisibleForTesting;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.security.PrivilegedExceptionAction;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
  * The Map-Reduce Application Master.
@@ -1351,7 +1314,7 @@ public class MRAppMaster extends CompositeService {
       // RM/NM to issue SSL certificates but definitely not MR-AM as it is
      // running in user-land.
       MRWebAppUtil.initialize(conf);
-      HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInMRAM());
+      HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
       // log the system properties
       String systemPropsToLog = MRApps.getSystemPropertiesToLog(conf);
       if (systemPropsToLog != null) {

AppController.java

@@ -63,7 +63,8 @@ public class AppController extends Controller implements AMParams {
     set(APP_ID, app.context.getApplicationID().toString());
     set(RM_WEB,
         JOINER.join(MRWebAppUtil.getYARNWebappScheme(),
-            WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf)));
+            WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf,
+                MRWebAppUtil.getYARNHttpPolicy())));
   }
 
   @Inject

JHAdminConfig.java

@@ -18,14 +18,9 @@
 package org.apache.hadoop.mapreduce.v2.jobhistory;
 
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Stores Job History configuration keys that can be set by administrators of
@@ -129,10 +124,11 @@ public class JHAdminConfig {
   public static final String MR_HISTORY_PRINCIPAL =
       MR_HISTORY_PREFIX + "principal";
 
-  /** To enable SSL in MR history server */
-  public static final String MR_HS_SSL_ENABLED = MR_HISTORY_PREFIX
-      + "ssl.enabled";
-  public static boolean DEFAULT_MR_HS_SSL_ENABLED = false;
+  /** To enable https in MR history server */
+  public static final String MR_HS_HTTP_POLICY = MR_HISTORY_PREFIX
+      + "http.policy";
+  public static String DEFAULT_MR_HS_HTTP_POLICY =
+      CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
 
   /**The address the history server webapp is on.*/
   public static final String MR_HISTORY_WEBAPP_ADDRESS =
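
A minimal sketch of how the renamed key would be consumed; this mirrors what MRWebAppUtil.initialize() does in the next file, and the wrapper class here is purely illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;

public class JHSPolicyExample {
  // Resolve the JobHistoryServer web policy; defaults to HTTP_ONLY.
  static Policy jhsPolicy(Configuration conf) {
    return Policy.fromString(conf.get(
        JHAdminConfig.MR_HS_HTTP_POLICY,
        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
  }
}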

MRWebAppUtil.java

@@ -17,24 +17,25 @@
  */
 package org.apache.hadoop.mapreduce.v2.util;
 
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TypeConverter;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.Iterator;
 
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Evolving;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.TypeConverter;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
+import static org.apache.hadoop.http.HttpConfig.Policy;
 
 @Private
 @Evolving
@@ -42,63 +43,44 @@ public class MRWebAppUtil {
   private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
   private static final Joiner JOINER = Joiner.on("");
 
-  private static boolean isSSLEnabledInYARN;
-  private static boolean isSSLEnabledInJHS;
-  private static boolean isSSLEnabledInMRAM;
+  private static Policy httpPolicyInYarn;
+  private static Policy httpPolicyInJHS;
 
   public static void initialize(Configuration conf) {
-    setSSLEnabledInYARN(conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
-    setSSLEnabledInJHS(conf.getBoolean(JHAdminConfig.MR_HS_SSL_ENABLED,
-        JHAdminConfig.DEFAULT_MR_HS_SSL_ENABLED));
-    setSSLEnabledInMRAM(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
-        MRConfig.SSL_ENABLED_KEY_DEFAULT));
+    setHttpPolicyInYARN(conf.get(
+        YarnConfiguration.YARN_HTTP_POLICY_KEY,
+        YarnConfiguration.YARN_HTTP_POLICY_DEFAULT));
+    setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
+        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
   }
 
-  private static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
-    MRWebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
+  private static void setHttpPolicyInJHS(String policy) {
+    MRWebAppUtil.httpPolicyInJHS = Policy.fromString(policy);
   }
 
-  private static void setSSLEnabledInJHS(boolean isSSLEnabledInJHS) {
-    MRWebAppUtil.isSSLEnabledInJHS = isSSLEnabledInJHS;
+  private static void setHttpPolicyInYARN(String policy) {
+    MRWebAppUtil.httpPolicyInYarn = Policy.fromString(policy);
   }
 
-  private static void setSSLEnabledInMRAM(boolean isSSLEnabledInMRAM) {
-    MRWebAppUtil.isSSLEnabledInMRAM = isSSLEnabledInMRAM;
+  public static Policy getJHSHttpPolicy() {
+    return MRWebAppUtil.httpPolicyInJHS;
   }
 
-  public static boolean isSSLEnabledInYARN() {
-    return isSSLEnabledInYARN;
-  }
-
-  public static boolean isSSLEnabledInJHS() {
-    return isSSLEnabledInJHS;
-  }
-
-  public static boolean isSSLEnabledInMRAM() {
-    return isSSLEnabledInMRAM;
+  public static Policy getYARNHttpPolicy() {
+    return MRWebAppUtil.httpPolicyInYarn;
   }
 
   public static String getYARNWebappScheme() {
-    if (isSSLEnabledInYARN) {
-      return "https://";
-    } else {
-      return "http://";
-    }
+    return HttpConfig.getScheme(httpPolicyInYarn);
   }
 
   public static String getJHSWebappScheme() {
-    if (isSSLEnabledInJHS) {
-      return "https://";
-    } else {
-      return "http://";
-    }
+    return HttpConfig.getScheme(httpPolicyInJHS);
   }
 
   public static void setJHSWebappURLWithoutScheme(Configuration conf,
       String hostAddress) {
-    if (isSSLEnabledInJHS) {
+    if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
       conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, hostAddress);
     } else {
       conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, hostAddress);
@@ -106,7 +88,7 @@ public class MRWebAppUtil {
   }
 
   public static String getJHSWebappURLWithoutScheme(Configuration conf) {
-    if (isSSLEnabledInJHS) {
+    if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
       return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
           JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS);
     } else {
@@ -120,7 +102,7 @@ public class MRWebAppUtil {
   }
 
   public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
-    if (isSSLEnabledInJHS) {
+    if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
       return conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
           JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
           JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
@@ -168,26 +150,18 @@ public class MRWebAppUtil {
   }
 
   private static int getDefaultJHSWebappPort() {
-    if (isSSLEnabledInJHS) {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT;
-    } else {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
-    }
+    return httpPolicyInJHS == Policy.HTTPS_ONLY ?
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT :
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
   }
 
   private static String getDefaultJHSWebappURLWithoutScheme() {
-    if (isSSLEnabledInJHS) {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS;
-    } else {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
-    }
+    return httpPolicyInJHS == Policy.HTTPS_ONLY ?
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS :
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
   }
 
   public static String getAMWebappScheme(Configuration conf) {
-    if (isSSLEnabledInMRAM) {
-      return "https://";
-    } else {
-      return "http://";
-    }
+    return "http://";
   }
 }
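
Because the policies are cached in static fields, initialize() must run before the getters return anything meaningful. A hedged usage sketch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;

public class MRWebAppUtilExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    MRWebAppUtil.initialize(conf);  // caches the YARN and JHS policies
    // "http://" or "https://", depending on yarn.http.policy and
    // mapreduce.jobhistory.http.policy respectively.
    System.out.println(MRWebAppUtil.getYARNWebappScheme());
    System.out.println(MRWebAppUtil.getJHSWebappScheme());
  }
}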

MRConfig.java

@@ -85,11 +85,6 @@ public interface MRConfig {
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
 
-  public static final String SSL_ENABLED_KEY =
-      "mapreduce.am.ssl.enabled";
-
-  public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
-
   public static final String SHUFFLE_CONSUMER_PLUGIN =
       "mapreduce.job.reduce.shuffle.consumer.plugin.class";

mapred-default.xml

@@ -289,20 +289,6 @@
   </description>
 </property>
 
-<property>
-  <name>mapreduce.am.ssl.enabled</name>
-  <value>false</value>
-  <description>
-    If enabled, MapReduce application master's http server will be
-    started with SSL enabled. Map reduce AM by default doesn't support SSL.
-    If MapReduce jobs want SSL support, it is the user's responsibility to
-    create and manage certificates, keystores and trust-stores with appropriate
-    permissions. This is only for MapReduce application master and is not used
-    by job history server. To enable encrypted shuffle this property is not
-    required, instead refer to (mapreduce.shuffle.ssl.enabled) property.
-  </description>
-</property>
-
 <property>
   <name>mapreduce.shuffle.ssl.file.buffer.size</name>
   <value>65536</value>
@@ -1235,11 +1221,13 @@
 </property>
 
 <property>
-  <name>mapreduce.jobhistory.ssl.enabled</name>
-  <value>false</value>
+  <name>mapreduce.jobhistory.http.policy</name>
+  <value>HTTP_ONLY</value>
   <description>
-    Whether to use SSL for the HTTP endpoints. If set to true, the
-    JobHistoryServer web UIs will be served over HTTPS instead HTTP.
+    This configures the HTTP endpoint for JobHistoryServer web UI.
+    The following values are supported:
+    - HTTP_ONLY : Service is provided only on http
+    - HTTPS_ONLY : Service is provided only on https
   </description>
 </property>
 
 </configuration>
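
The same override can also be made programmatically; a small sketch, equivalent to setting the property in mapred-site.xml:

import org.apache.hadoop.conf.Configuration;

public class JHSHttpsOnlyExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Serve the JobHistoryServer web UI over HTTPS only.
    conf.set("mapreduce.jobhistory.http.policy", "HTTPS_ONLY");
  }
}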

JobHistoryServer.java

@@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
@@ -120,7 +119,7 @@ public class JobHistoryServer extends CompositeService {
     // This is required for WebApps to use https if enabled.
     MRWebAppUtil.initialize(getConfig());
-    HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInJHS());
+    HttpConfig.setPolicy(MRWebAppUtil.getJHSHttpPolicy());
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {

CHANGES.txt

@@ -102,6 +102,10 @@ Release 2.1.2 - UNRELEASED
     YARN-1213. Restore config to ban submitting to undeclared pools in the
     Fair Scheduler. (Sandy Ryza)
 
+    YARN-1277. Added a policy based configuration for http/https in common
+    HttpServer and using the same in YARN - related to per project https config
+    support via HADOOP-10022. (Suresh Srinivas and Omkar Vinit Joshi via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES

YarnConfiguration.java

@@ -24,6 +24,7 @@ import java.util.Arrays;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
@@ -863,6 +864,11 @@ public class YarnConfiguration extends Configuration {
       YARN_PREFIX + "client.max-nodemanagers-proxies";
   public static final int DEFAULT_NM_CLIENT_MAX_NM_PROXIES = 500;
 
+  public static final String YARN_HTTP_POLICY_KEY =
+      YARN_PREFIX + "http.policy";
+  public static final String YARN_HTTP_POLICY_DEFAULT =
+      CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
+
   public YarnConfiguration() {
     super();
   }

WebAppUtils.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -97,8 +98,14 @@ public class WebAppUtils {
   }
 
   public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf) {
+    return getResolvedRMWebAppURLWithoutScheme(conf,
+        HttpConfig.isSecure() ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY);
+  }
+
+  public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf,
+      Policy httpPolicy) {
     InetSocketAddress address = null;
-    if (HttpConfig.isSecure()) {
+    if (httpPolicy == Policy.HTTPS_ONLY) {
       address =
           conf.getSocketAddr(YarnConfiguration.RM_WEBAPP_HTTPS_ADDRESS,
               YarnConfiguration.DEFAULT_RM_WEBAPP_HTTPS_ADDRESS,
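
The new overload lets callers such as AppController above pass an explicit policy instead of depending on the process-wide HttpConfig state. A sketch, assuming the class's usual package org.apache.hadoop.yarn.webapp.util:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;

public class RMWebAppUrlExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolve the RM HTTPS web address without consulting the global
    // HttpConfig.isSecure() flag.
    String rmWeb = WebAppUtils.getResolvedRMWebAppURLWithoutScheme(
        conf, Policy.HTTPS_ONLY);
    System.out.println(rmWeb);
  }
}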

yarn-default.xml

@@ -99,6 +99,17 @@
     <value>50</value>
   </property>
 
+  <property>
+    <description>
+      This configures the HTTP endpoint for Yarn Daemons.The following
+      values are supported:
+      - HTTP_ONLY : Service is provided only on http
+      - HTTPS_ONLY : Service is provided only on https
+    </description>
+    <name>yarn.http.policy</name>
+    <value>HTTP_ONLY</value>
+  </property>
+
   <property>
     <description>The http address of the RM web application.</description>
     <name>yarn.resourcemanager.webapp.address</name>

NodeManager.java

@@ -28,6 +28,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.CompositeService;
@@ -395,9 +397,16 @@ public class NodeManager extends CompositeService
     StringUtils.startupShutdownMessage(NodeManager.class, args, LOG);
     NodeManager nodeManager = new NodeManager();
     Configuration conf = new YarnConfiguration();
+    setHttpPolicy(conf);
     nodeManager.initAndStartNodeManager(conf, false);
   }
 
+  private static void setHttpPolicy(Configuration conf) {
+    HttpConfig.setPolicy(Policy.fromString(conf.get(
+        YarnConfiguration.YARN_HTTP_POLICY_KEY,
+        YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
+  }
+
   @VisibleForTesting
   @Private
   public NodeStatusUpdater getNodeStatusUpdater() {

ResourceManager.java

@@ -28,6 +28,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.security.SecurityUtil;
@@ -935,6 +936,7 @@ public class ResourceManager extends CompositeService implements Recoverable {
       ShutdownHookManager.get().addShutdownHook(
           new CompositeServiceShutdownHook(resourceManager),
           SHUTDOWN_HOOK_PRIORITY);
+      setHttpPolicy(conf);
       resourceManager.init(conf);
       resourceManager.start();
     } catch (Throwable t) {
@@ -942,4 +944,10 @@ public class ResourceManager extends CompositeService implements Recoverable {
       System.exit(-1);
     }
   }
+
+  private static void setHttpPolicy(Configuration conf) {
+    HttpConfig.setPolicy(Policy.fromString(conf.get(
+        YarnConfiguration.YARN_HTTP_POLICY_KEY,
+        YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
+  }
 }

ProxyUriUtils.java

@@ -18,7 +18,11 @@
 package org.apache.hadoop.yarn.server.webproxy;
 
-import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.util.TrackingUriPlugin;
 
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
@@ -26,11 +30,7 @@ import java.net.URISyntaxException;
 import java.net.URLEncoder;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.TrackingUriPlugin;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
 
 public class ProxyUriUtils {
   @SuppressWarnings("unused")
@@ -148,7 +148,7 @@ public class ProxyUriUtils {
       /*
        * check is made to make sure if AM reports with scheme then it will be
        * used by default otherwise it will default to the one configured using
-       * "hadoop.ssl.enabled".
+       * "yarn.http.policy".
        */
       return new URI(HttpConfig.getSchemePrefix() + url);
     } else {
@@ -168,7 +168,7 @@ public class ProxyUriUtils {
       /*
        * check is made to make sure if AM reports with scheme then it will be
        * used by default otherwise it will default to the one configured using
-       * "hadoop.ssl.enabled".
+       * "yarn.http.policy".
       */
      return new URI(scheme + "://" + noSchemeUrl);
    } else {