YARN-6638. [ATSv2 Security] Timeline reader side changes for loading auth filters and principals. Contributed by Varun Saxena

Jian He 2017-06-09 13:42:38 -07:00 committed by Varun Saxena
parent 879de51206
commit d3f11e3f13
5 changed files with 160 additions and 68 deletions

TimelineAuthenticationFilterInitializer.java

@@ -51,30 +51,18 @@ import java.util.Map;
public class TimelineAuthenticationFilterInitializer extends FilterInitializer {
/**
- * The configuration prefix of timeline HTTP authentication
+ * The configuration prefix of timeline HTTP authentication.
*/
public static final String PREFIX = "yarn.timeline-service.http-authentication.";
@VisibleForTesting
Map<String, String> filterConfig;
- /**
- * Initializes {@link TimelineAuthenticationFilter}
- * <p>
- * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN
- * configuration properties prefixed with {@value #PREFIX}
- *
- * @param container
- * The filter container
- * @param conf
- * Configuration for run-time parameters
- */
- @Override
- public void initFilter(FilterContainer container, Configuration conf) {
+ protected void setAuthFilterConfig(Configuration conf) {
filterConfig = new HashMap<String, String>();
// setting the cookie path to root '/' so it is used for all resources.
- filterConfig.put(TimelineAuthenticationFilter.COOKIE_PATH, "/");
+ filterConfig.put(AuthenticationFilter.COOKIE_PATH, "/");
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
@@ -95,14 +83,6 @@ public class TimelineAuthenticationFilterInitializer extends FilterInitializer {
}
}
- String authType = filterConfig.get(AuthenticationFilter.AUTH_TYPE);
- if (authType.equals(PseudoAuthenticationHandler.TYPE)) {
- filterConfig.put(AuthenticationFilter.AUTH_TYPE,
- PseudoDelegationTokenAuthenticationHandler.class.getName());
- } else if (authType.equals(KerberosAuthenticationHandler.TYPE)) {
- filterConfig.put(AuthenticationFilter.AUTH_TYPE,
- KerberosDelegationTokenAuthenticationHandler.class.getName());
// Resolve _HOST into bind address
String bindAddress = conf.get(HttpServer2.BIND_ADDRESS);
String principal =
@@ -111,14 +91,41 @@
try {
principal = SecurityUtil.getServerPrincipal(principal, bindAddress);
} catch (IOException ex) {
- throw new RuntimeException(
- "Could not resolve Kerberos principal name: " + ex.toString(), ex);
+ throw new RuntimeException("Could not resolve Kerberos principal " +
+ "name: " + ex.toString(), ex);
}
filterConfig.put(KerberosAuthenticationHandler.PRINCIPAL,
principal);
}
}
protected Map<String, String> getFilterConfig() {
return filterConfig;
}
/**
* Initializes {@link TimelineAuthenticationFilter}
* <p>
* Propagates to {@link TimelineAuthenticationFilter} configuration all YARN
* configuration properties prefixed with {@value #PREFIX}
*
* @param container
* The filter container
* @param conf
* Configuration for run-time parameters
*/
@Override
public void initFilter(FilterContainer container, Configuration conf) {
setAuthFilterConfig(conf);
String authType = filterConfig.get(AuthenticationFilter.AUTH_TYPE);
if (authType.equals(PseudoAuthenticationHandler.TYPE)) {
filterConfig.put(AuthenticationFilter.AUTH_TYPE,
PseudoDelegationTokenAuthenticationHandler.class.getName());
} else if (authType.equals(KerberosAuthenticationHandler.TYPE)) {
filterConfig.put(AuthenticationFilter.AUTH_TYPE,
KerberosDelegationTokenAuthenticationHandler.class.getName());
}
filterConfig.put(DelegationTokenAuthenticationHandler.TOKEN_KIND,
TimelineDelegationTokenIdentifier.KIND_NAME.toString());
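The refactoring above splits configuration assembly (setAuthFilterConfig and getFilterConfig) from filter registration (initFilter), so subclasses can reuse the prefix handling while registering a different filter class. Below is a minimal sketch of how yarn.timeline-service.http-authentication.* properties flow through setAuthFilterConfig; the demo class and the principal and keytab values are placeholders, not part of this change.

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;

// Illustration only; principal and keytab values are placeholders.
public class AuthFilterConfigDemo extends TimelineAuthenticationFilterInitializer {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    // Properties under PREFIX are copied into the filter config with the
    // prefix stripped, so the filter sees "type", "kerberos.principal", etc.
    conf.set(PREFIX + "type", "kerberos");
    conf.set(PREFIX + "kerberos.principal", "HTTP/_HOST@EXAMPLE.COM");
    conf.set(PREFIX + "kerberos.keytab", "/etc/security/keytabs/spnego.keytab");

    AuthFilterConfigDemo demo = new AuthFilterConfigDemo();
    demo.setAuthFilterConfig(conf);   // also resolves _HOST in the principal
    for (Map.Entry<String, String> e : demo.getFilterConfig().entrySet()) {
      System.out.println(e.getKey() + " = " + e.getValue());
    }
  }
}

With type set to kerberos, initFilter() later replaces the type value with KerberosDelegationTokenAuthenticationHandler; with simple it uses the pseudo delegation token handler, as shown in the hunk above.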

AbstractTimelineReaderHBaseTestBase.java

@@ -88,13 +88,14 @@ public abstract class AbstractTimelineReaderHBaseTestBase {
config.setInt("hfile.format.version", 3);
server = new TimelineReaderServer() {
@Override
- protected void setupOptions(Configuration conf) {
- // The parent code tries to use HttpServer2 from this version of
- // Hadoop, but the tests are loading in HttpServer2 from
+ protected void addFilters(Configuration conf) {
+ // The parent code uses hadoop-common jar from this version of
+ // Hadoop, but the tests are using hadoop-common jar from
// ${hbase-compatible-hadoop.version}. This version uses Jetty 9
// while ${hbase-compatible-hadoop.version} uses Jetty 6, and there
// are many differences, including classnames and packages.
- // We do nothing here, so that we don't cause a NoSuchMethodError.
+ // We do nothing here, so that we don't cause a NoSuchMethodError or
+ // NoClassDefFoundError.
// Once ${hbase-compatible-hadoop.version} is changed to Hadoop 3,
// we should be able to remove this @Override.
}

TimelineReaderServer.java

@@ -18,19 +18,18 @@
package org.apache.hadoop.yarn.server.timelineservice.reader;
- import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
- import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
- import java.util.HashMap;
- import java.util.Map;
import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;
- import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.security.HttpCrossOriginFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.ReflectionUtils;
@@ -40,7 +39,9 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderAuthenticationFilterInitializer;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.apache.hadoop.yarn.server.util.timeline.TimelineServerUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
@@ -71,6 +72,17 @@ public class TimelineReaderServer extends CompositeService {
if (!YarnConfiguration.timelineServiceV2Enabled(conf)) {
throw new YarnException("timeline service v.2 is not enabled");
}
InetSocketAddress bindAddr = conf.getSocketAddr(
YarnConfiguration.TIMELINE_SERVICE_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT);
// Login from keytab if security is enabled.
try {
SecurityUtil.login(conf, YarnConfiguration.TIMELINE_SERVICE_KEYTAB,
YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL, bindAddr.getHostName());
} catch(IOException e) {
throw new YarnRuntimeException("Failed to login from keytab", e);
}
TimelineReader timelineReaderStore = createTimelineReaderStore(conf);
timelineReaderStore.init(conf);
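The new block above makes the reader log in from its keytab during service init, resolving _HOST against the hostname taken from yarn.timeline-service.address. A minimal sketch of the configuration this path consumes, with placeholder principal, keytab path, and host values:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustration only; principal, keytab path and host are placeholders.
public class ReaderLoginConfDemo {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    // SecurityUtil.login() only performs a Kerberos login when security is enabled.
    conf.set("hadoop.security.authentication", "kerberos");
    conf.set(YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL,
        "timelinereader/_HOST@EXAMPLE.COM");
    conf.set(YarnConfiguration.TIMELINE_SERVICE_KEYTAB,
        "/etc/security/keytabs/timelinereader.keytab");
    // _HOST above is replaced with the hostname portion of this address.
    conf.set(YarnConfiguration.TIMELINE_SERVICE_ADDRESS,
        "reader.example.com:10200");
    System.out.println("principal: "
        + conf.get(YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL));
  }
}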
@@ -130,29 +142,39 @@
super.serviceStop();
}
- private void startTimelineReaderWebApp() {
- Configuration conf = getConfig();
- String bindAddress = WebAppUtils.getWebAppBindURL(conf,
- YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
- WebAppUtils.getTimelineReaderWebAppURL(conf));
- LOG.info("Instantiating TimelineReaderWebApp at " + bindAddress);
+ protected void addFilters(Configuration conf) {
boolean enableCorsFilter = conf.getBoolean(
YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED,
YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT);
- // setup CORS
+ // Setup CORS
if (enableCorsFilter) {
conf.setBoolean(HttpCrossOriginFilterInitializer.PREFIX
+ HttpCrossOriginFilterInitializer.ENABLED_SUFFIX, true);
}
String initializers = conf.get("hadoop.http.filter.initializers", "");
Set<String> defaultInitializers = new LinkedHashSet<String>();
if (!initializers.contains(
TimelineReaderAuthenticationFilterInitializer.class.getName())) {
defaultInitializers.add(
TimelineReaderAuthenticationFilterInitializer.class.getName());
}
TimelineServerUtils.setTimelineFilters(
conf, initializers, defaultInitializers);
}
private void startTimelineReaderWebApp() {
Configuration conf = getConfig();
addFilters(conf);
String bindAddress = WebAppUtils.getWebAppBindURL(conf,
YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
WebAppUtils.getTimelineReaderWebAppURL(conf));
LOG.info("Instantiating TimelineReaderWebApp at " + bindAddress);
try {
HttpServer2.Builder builder = new HttpServer2.Builder()
.setName("timeline")
.setConf(conf)
.addEndpoint(URI.create("http://" + bindAddress));
readerWebServer = builder.build();
- setupOptions(conf);
readerWebServer.addJerseyResourcePackage(
TimelineReaderWebServices.class.getPackage().getName() + ";"
+ GenericExceptionHandler.class.getPackage().getName() + ";"
@@ -168,22 +190,6 @@
}
}
- /**
- * Sets up some options and filters.
- *
- * @param conf Configuration
- */
- protected void setupOptions(Configuration conf) {
- Map<String, String> options = new HashMap<>();
- String username = conf.get(HADOOP_HTTP_STATIC_USER,
- DEFAULT_HADOOP_HTTP_STATIC_USER);
- options.put(HADOOP_HTTP_STATIC_USER, username);
- HttpServer2.defineFilter(readerWebServer.getWebAppContext(),
- "static_user_filter_timeline",
- StaticUserWebFilter.StaticUserFilter.class.getName(),
- options, new String[] {"/*"});
- }
@VisibleForTesting
public int getWebServerPort() {
return readerWebServer.getConnectorAddress(0).getPort();
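With the hunks above, the static user filter setup is removed and addFilters() instead merges TimelineReaderAuthenticationFilterInitializer into hadoop.http.filter.initializers before the web server is built. A rough sketch of the expected effect, assuming a hypothetical demo subclass (addFilters is protected) and assuming TimelineServerUtils.setTimelineFilters writes the merged initializer set back to that key:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer;
import org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderAuthenticationFilterInitializer;

// Illustration only; the subclass exists just to reach the protected method.
public class ReaderFilterWiringDemo extends TimelineReaderServer {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    new ReaderFilterWiringDemo().addFilters(conf);
    // Expected to print true, assuming setTimelineFilters writes the merged
    // initializer list back to hadoop.http.filter.initializers.
    System.out.println(conf.get("hadoop.http.filter.initializers", "")
        .contains(TimelineReaderAuthenticationFilterInitializer.class.getName()));
  }
}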

TimelineReaderAuthenticationFilterInitializer.java (new file)

@@ -0,0 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader.security;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.security.AuthenticationWithProxyUserFilter;
import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
/**
* Filter initializer to initialize {@link AuthenticationWithProxyUserFilter}
* for ATSv2 timeline reader server with timeline service specific
* configurations.
*/
public class TimelineReaderAuthenticationFilterInitializer extends
TimelineAuthenticationFilterInitializer {
/**
* Initializes {@link AuthenticationWithProxyUserFilter}
* <p>
* Propagates to {@link AuthenticationWithProxyUserFilter} configuration all
* YARN configuration properties prefixed with
* {@value TimelineAuthenticationFilterInitializer#PREFIX}.
*
* @param container
* The filter container
* @param conf
* Configuration for run-time parameters
*/
@Override
public void initFilter(FilterContainer container, Configuration conf) {
setAuthFilterConfig(conf);
container.addGlobalFilter("Timeline Reader Authentication Filter",
AuthenticationWithProxyUserFilter.class.getName(),
getFilterConfig());
}
}
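For illustration, a stub FilterContainer makes visible what this initializer registers: a single global AuthenticationWithProxyUserFilter configured from the yarn.timeline-service.http-authentication.* properties. The demo class and the simple auth type value are assumptions for the sketch, not part of the patch.

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderAuthenticationFilterInitializer;

// Illustration only; the stub container just prints what gets registered.
public class ReaderAuthFilterDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("yarn.timeline-service.http-authentication.type", "simple");

    FilterContainer container = new FilterContainer() {
      @Override
      public void addFilter(String name, String classname,
          Map<String, String> parameters) {
        System.out.println("filter: " + name + " -> " + classname);
      }
      @Override
      public void addGlobalFilter(String name, String classname,
          Map<String, String> parameters) {
        // Expected: "Timeline Reader Authentication Filter" mapped to
        // org.apache.hadoop.security.AuthenticationWithProxyUserFilter
        System.out.println("global filter: " + name + " -> " + classname);
      }
    };
    new TimelineReaderAuthenticationFilterInitializer().initFilter(container, conf);
  }
}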

package-info.java (new file)

@@ -0,0 +1,25 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Package org.apache.hadoop.yarn.server.timelineservice.reader.security contains
* classes to be used to support SPNEGO authentication for timeline reader.
*/
@InterfaceAudience.Private
package org.apache.hadoop.yarn.server.timelineservice.reader.security;
import org.apache.hadoop.classification.InterfaceAudience;