YARN-1277. Added a policy based configuration for http/https in common HttpServer and using the same in YARN - related to per project https config support via HADOOP-10022. Contributed by Suresh Srinivas and Omkar Vinit Joshi.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1529662 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2013-10-06 18:43:36 +00:00
parent 7317e97bd7
commit 21181b6553
17 changed files with 160 additions and 179 deletions

View File

@ -264,5 +264,9 @@ public class CommonConfigurationKeysPublic {
/** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
60;
// HTTP policies to be used in configuration
public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY";
public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY";
}

View File

@ -28,25 +28,41 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class HttpConfig {
private static boolean sslEnabled;
private static Policy policy;
public enum Policy {
HTTP_ONLY,
HTTPS_ONLY;
public static Policy fromString(String value) {
if (value.equalsIgnoreCase(CommonConfigurationKeysPublic
.HTTP_POLICY_HTTPS_ONLY)) {
return HTTPS_ONLY;
}
return HTTP_ONLY;
}
}
static {
Configuration conf = new Configuration();
sslEnabled = conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
boolean sslEnabled = conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
}
public static void setSecure(boolean secure) {
sslEnabled = secure;
public static void setPolicy(Policy policy) {
HttpConfig.policy = policy;
}
public static boolean isSecure() {
return sslEnabled;
return policy == Policy.HTTPS_ONLY;
}
public static String getSchemePrefix() {
return (isSecure()) ? "https://" : "http://";
}
public static String getScheme(Policy policy) {
return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
}
}
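
For context, a minimal usage sketch of the new API (not part of the patch; "yarn.http.policy" is the key introduced later in this commit): a daemon resolves its project-specific policy string, installs it process-wide, and derives the URL scheme from it.

Configuration conf = new Configuration();
String value = conf.get("yarn.http.policy",
    CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY);
// fromString() falls back to HTTP_ONLY for any value other than HTTPS_ONLY
HttpConfig.setPolicy(HttpConfig.Policy.fromString(value));
String scheme = HttpConfig.getSchemePrefix(); // "https://" only under HTTPS_ONLY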

View File

@ -54,7 +54,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
@Before
public void setup() throws Exception {
HttpConfig.setSecure(true);
HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
File base = new File(BASEDIR);
FileUtil.fullyDelete(base);
base.mkdirs();
@ -89,7 +89,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
String classpathDir =
KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
new File(classpathDir, CONFIG_SITE_XML).delete();
HttpConfig.setSecure(false);
HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
}

View File

@ -18,60 +18,29 @@
package org.apache.hadoop.mapreduce.v2.app;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.mapred.FileOutputCommitter;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.LocalContainerLauncher;
import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent;
import org.apache.hadoop.mapreduce.jobhistory.EventReader;
import org.apache.hadoop.mapreduce.jobhistory.EventType;
import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryCopyService;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.*;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.*;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
@ -82,30 +51,17 @@ import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobStartEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.*;
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl;
import org.apache.hadoop.mapreduce.v2.app.local.LocalContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor;
import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
import org.apache.hadoop.mapreduce.v2.app.rm.*;
import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@ -139,7 +95,14 @@ import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.SystemClock;
import com.google.common.annotations.VisibleForTesting;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The Map-Reduce Application Master.
@ -1351,7 +1314,7 @@ public class MRAppMaster extends CompositeService {
// RM/NM to issue SSL certificates but definitely not MR-AM as it is
// running in user-land.
MRWebAppUtil.initialize(conf);
HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInMRAM());
HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
// log the system properties
String systemPropsToLog = MRApps.getSystemPropertiesToLog(conf);
if (systemPropsToLog != null) {

View File

@ -63,7 +63,8 @@ public class AppController extends Controller implements AMParams {
set(APP_ID, app.context.getApplicationID().toString());
set(RM_WEB,
JOINER.join(MRWebAppUtil.getYARNWebappScheme(),
WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf)));
WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf,
MRWebAppUtil.getYARNHttpPolicy())));
}
@Inject

View File

@ -18,14 +18,9 @@
package org.apache.hadoop.mapreduce.v2.jobhistory;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
/**
* Stores Job History configuration keys that can be set by administrators of
@ -129,10 +124,11 @@ public class JHAdminConfig {
public static final String MR_HISTORY_PRINCIPAL =
MR_HISTORY_PREFIX + "principal";
/** To enable SSL in MR history server */
public static final String MR_HS_SSL_ENABLED = MR_HISTORY_PREFIX
+ "ssl.enabled";
public static boolean DEFAULT_MR_HS_SSL_ENABLED = false;
/** To enable https in MR history server */
public static final String MR_HS_HTTP_POLICY = MR_HISTORY_PREFIX
+ "http.policy";
public static String DEFAULT_MR_HS_HTTP_POLICY =
CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
/**The address the history server webapp is on.*/
public static final String MR_HISTORY_WEBAPP_ADDRESS =

View File

@ -17,24 +17,25 @@
*/
package org.apache.hadoop.mapreduce.v2.util;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Iterator;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import static org.apache.hadoop.http.HttpConfig.Policy;
@Private
@Evolving
@ -42,63 +43,44 @@ public class MRWebAppUtil {
private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
private static final Joiner JOINER = Joiner.on("");
private static boolean isSSLEnabledInYARN;
private static boolean isSSLEnabledInJHS;
private static boolean isSSLEnabledInMRAM;
private static Policy httpPolicyInYarn;
private static Policy httpPolicyInJHS;
public static void initialize(Configuration conf) {
setSSLEnabledInYARN(conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
setSSLEnabledInJHS(conf.getBoolean(JHAdminConfig.MR_HS_SSL_ENABLED,
JHAdminConfig.DEFAULT_MR_HS_SSL_ENABLED));
setSSLEnabledInMRAM(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
MRConfig.SSL_ENABLED_KEY_DEFAULT));
setHttpPolicyInYARN(conf.get(
YarnConfiguration.YARN_HTTP_POLICY_KEY,
YarnConfiguration.YARN_HTTP_POLICY_DEFAULT));
setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
}
private static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
MRWebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
private static void setHttpPolicyInJHS(String policy) {
MRWebAppUtil.httpPolicyInJHS = Policy.fromString(policy);
}
private static void setSSLEnabledInJHS(boolean isSSLEnabledInJHS) {
MRWebAppUtil.isSSLEnabledInJHS = isSSLEnabledInJHS;
private static void setHttpPolicyInYARN(String policy) {
MRWebAppUtil.httpPolicyInYarn = Policy.fromString(policy);
}
private static void setSSLEnabledInMRAM(boolean isSSLEnabledInMRAM) {
MRWebAppUtil.isSSLEnabledInMRAM = isSSLEnabledInMRAM;
public static Policy getJHSHttpPolicy() {
return MRWebAppUtil.httpPolicyInJHS;
}
public static boolean isSSLEnabledInYARN() {
return isSSLEnabledInYARN;
}
public static boolean isSSLEnabledInJHS() {
return isSSLEnabledInJHS;
}
public static boolean isSSLEnabledInMRAM() {
return isSSLEnabledInMRAM;
public static Policy getYARNHttpPolicy() {
return MRWebAppUtil.httpPolicyInYarn;
}
public static String getYARNWebappScheme() {
if (isSSLEnabledInYARN) {
return "https://";
} else {
return "http://";
}
return HttpConfig.getScheme(httpPolicyInYarn);
}
public static String getJHSWebappScheme() {
if (isSSLEnabledInJHS) {
return "https://";
} else {
return "http://";
}
return HttpConfig.getScheme(httpPolicyInJHS);
}
public static void setJHSWebappURLWithoutScheme(Configuration conf,
String hostAddress) {
if (isSSLEnabledInJHS) {
if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, hostAddress);
} else {
conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, hostAddress);
@ -106,7 +88,7 @@ public class MRWebAppUtil {
}
public static String getJHSWebappURLWithoutScheme(Configuration conf) {
if (isSSLEnabledInJHS) {
if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS);
} else {
@ -120,7 +102,7 @@ public class MRWebAppUtil {
}
public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
if (isSSLEnabledInJHS) {
if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
return conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
@ -168,26 +150,18 @@ public class MRWebAppUtil {
}
private static int getDefaultJHSWebappPort() {
if (isSSLEnabledInJHS) {
return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT;
} else {
return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
}
return httpPolicyInJHS == Policy.HTTPS_ONLY ?
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT:
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
}
private static String getDefaultJHSWebappURLWithoutScheme() {
if (isSSLEnabledInJHS) {
return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS;
} else {
return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
}
return httpPolicyInJHS == Policy.HTTPS_ONLY ?
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS :
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
}
public static String getAMWebappScheme(Configuration conf) {
if (isSSLEnabledInMRAM) {
return "https://";
} else {
return "http://";
}
return "http://";
}
}
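
A sketch of the call pattern this class now supports, assembled from this hunk and the AppController change above (not part of the patch): initialize once from the daemon configuration, then query per-component policies and schemes.

MRWebAppUtil.initialize(conf); // reads yarn.http.policy and mapreduce.jobhistory.http.policy
String rmWeb = MRWebAppUtil.getYARNWebappScheme()
    + WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf,
        MRWebAppUtil.getYARNHttpPolicy());
String jhsWeb = MRWebAppUtil.getJHSWebappScheme()
    + MRWebAppUtil.getJHSWebappURLWithoutScheme(conf);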

View File

@ -84,11 +84,6 @@ public interface MRConfig {
"mapreduce.shuffle.ssl.enabled";
public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
public static final String SSL_ENABLED_KEY =
"mapreduce.am.ssl.enabled";
public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
public static final String SHUFFLE_CONSUMER_PLUGIN =
"mapreduce.job.reduce.shuffle.consumer.plugin.class";

View File

@ -289,20 +289,6 @@
</description>
</property>
<property>
<name>mapreduce.am.ssl.enabled</name>
<value>false</value>
<description>
If enabled, MapReduce application master's http server will be
started with SSL enabled. Map reduce AM by default doesn't support SSL.
If MapReduce jobs want SSL support, it is the user's responsibility to
create and manage certificates, keystores and trust-stores with appropriate
permissions. This is only for MapReduce application master and is not used
by job history server. To enable encrypted shuffle this property is not
required, instead refer to (mapreduce.shuffle.ssl.enabled) property.
</description>
</property>
<property>
<name>mapreduce.shuffle.ssl.file.buffer.size</name>
<value>65536</value>
@ -1235,11 +1221,13 @@
</property>
<property>
<name>mapreduce.jobhistory.ssl.enabled</name>
<value>false</value>
<name>mapreduce.jobhistory.http.policy</name>
<value>HTTP_ONLY</value>
<description>
Whether to use SSL for the HTTP endpoints. If set to true, the
JobHistoryServer web UIs will be served over HTTPS instead HTTP.
This configures the HTTP endpoint for JobHistoryServer web UI.
The following values are supported:
- HTTP_ONLY : Service is provided only on http
- HTTPS_ONLY : Service is provided only on https
</description>
</property>
</configuration>
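
Equivalently in code, a deployment or test can opt the history server into https before it starts; a hedged sketch using the constants added in JHAdminConfig above:

Configuration conf = new Configuration();
conf.set(JHAdminConfig.MR_HS_HTTP_POLICY,
    CommonConfigurationKeysPublic.HTTP_POLICY_HTTPS_ONLY);
// at startup, JobHistoryServer passes MRWebAppUtil.getJHSHttpPolicy()
// to HttpConfig.setPolicy(), as shown in the JobHistoryServer hunk below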

View File

@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRConfig;
@ -120,7 +119,7 @@ public class JobHistoryServer extends CompositeService {
// This is required for WebApps to use https if enabled.
MRWebAppUtil.initialize(getConfig());
HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInJHS());
HttpConfig.setPolicy(MRWebAppUtil.getJHSHttpPolicy());
try {
doSecureLogin(conf);
} catch(IOException ie) {

View File

@ -102,6 +102,10 @@ Release 2.1.2 - UNRELEASED
YARN-1213. Restore config to ban submitting to undeclared pools in the
Fair Scheduler. (Sandy Ryza)
YARN-1277. Added a policy based configuration for http/https in common
HttpServer and using the same in YARN - related to per project https config
support via HADOOP-10022. (Suresh Srinivas and Omkar Vinit Joshi via vinodkv)
OPTIMIZATIONS
BUG FIXES

View File

@ -24,6 +24,7 @@ import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
@ -862,7 +863,12 @@ public class YarnConfiguration extends Configuration {
public static final String NM_CLIENT_MAX_NM_PROXIES =
YARN_PREFIX + "client.max-nodemanagers-proxies";
public static final int DEFAULT_NM_CLIENT_MAX_NM_PROXIES = 500;
public static final String YARN_HTTP_POLICY_KEY =
YARN_PREFIX + "http.policy";
public static final String YARN_HTTP_POLICY_DEFAULT =
CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
public YarnConfiguration() {
super();
}

View File

@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -97,8 +98,14 @@ public class WebAppUtils {
}
public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf) {
return getResolvedRMWebAppURLWithoutScheme(conf,
HttpConfig.isSecure() ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY);
}
public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf,
Policy httpPolicy) {
InetSocketAddress address = null;
if (HttpConfig.isSecure()) {
if (httpPolicy == Policy.HTTPS_ONLY) {
address =
conf.getSocketAddr(YarnConfiguration.RM_WEBAPP_HTTPS_ADDRESS,
YarnConfiguration.DEFAULT_RM_WEBAPP_HTTPS_ADDRESS,

View File

@ -99,6 +99,17 @@
<value>50</value>
</property>
<property>
<description>
This configures the HTTP endpoint for Yarn Daemons. The following
values are supported:
- HTTP_ONLY : Service is provided only on http
- HTTPS_ONLY : Service is provided only on https
</description>
<name>yarn.http.policy</name>
<value>HTTP_ONLY</value>
</property>
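
Note that Policy.fromString (earlier in this commit) treats any value other than HTTPS_ONLY as HTTP_ONLY, so a misspelled value silently yields plain http; a sketch of how the YARN daemons below resolve it:

Configuration conf = new YarnConfiguration();
HttpConfig.Policy p = HttpConfig.Policy.fromString(conf.get(
    YarnConfiguration.YARN_HTTP_POLICY_KEY,
    YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)); // "https_only" -> HTTPS_ONLY, "both" -> HTTP_ONLY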
<property>
<description>The http address of the RM web application.</description>
<name>yarn.resourcemanager.webapp.address</name>

View File

@ -28,6 +28,8 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.service.CompositeService;
@ -395,9 +397,16 @@ public class NodeManager extends CompositeService
StringUtils.startupShutdownMessage(NodeManager.class, args, LOG);
NodeManager nodeManager = new NodeManager();
Configuration conf = new YarnConfiguration();
setHttpPolicy(conf);
nodeManager.initAndStartNodeManager(conf, false);
}
private static void setHttpPolicy(Configuration conf) {
HttpConfig.setPolicy(Policy.fromString(conf.get(
YarnConfiguration.YARN_HTTP_POLICY_KEY,
YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
}
@VisibleForTesting
@Private
public NodeStatusUpdater getNodeStatusUpdater() {

View File

@ -28,6 +28,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.security.SecurityUtil;
@ -935,6 +936,7 @@ public class ResourceManager extends CompositeService implements Recoverable {
ShutdownHookManager.get().addShutdownHook(
new CompositeServiceShutdownHook(resourceManager),
SHUTDOWN_HOOK_PRIORITY);
setHttpPolicy(conf);
resourceManager.init(conf);
resourceManager.start();
} catch (Throwable t) {
@ -942,4 +944,10 @@ public class ResourceManager extends CompositeService implements Recoverable {
System.exit(-1);
}
}
private static void setHttpPolicy(Configuration conf) {
HttpConfig.setPolicy(Policy.fromString(conf.get(
YarnConfiguration.YARN_HTTP_POLICY_KEY,
YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
}
}

View File

@ -18,7 +18,11 @@
package org.apache.hadoop.yarn.server.webproxy;
import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.TrackingUriPlugin;
import java.io.UnsupportedEncodingException;
import java.net.URI;
@ -26,11 +30,7 @@ import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.TrackingUriPlugin;
import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
public class ProxyUriUtils {
@SuppressWarnings("unused")
@ -148,9 +148,9 @@ public class ProxyUriUtils {
/*
* check is made to make sure if AM reports with scheme then it will be
* used by default otherwise it will default to the one configured using
* "hadoop.ssl.enabled".
* "yarn.http.policy".
*/
return new URI(HttpConfig.getSchemePrefix() + url);
} else {
return new URI(url);
}
@ -168,9 +168,9 @@ public class ProxyUriUtils {
/*
* check is made to make sure if AM reports with scheme then it will be
* used by default otherwise it will default to the one configured using
* "hadoop.ssl.enabled".
* "yarn.http.policy".
*/
return new URI(scheme + "://" + noSchemeUrl);
} else {
return new URI(noSchemeUrl);
}
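
A sketch of the fallback the updated comments describe (the scheme check and variable names are assumptions; the enclosing method bodies are elided in this view): a scheme supplied by the AM wins, otherwise the policy-derived prefix is prepended.

String url = "node:8042/jobhistory"; // AM-reported address without a scheme
URI uri = url.contains("://")
    ? new URI(url)                                  // AM supplied a scheme; honor it
    : new URI(HttpConfig.getSchemePrefix() + url);  // else default per yarn.http.policy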