HADOOP-12625. Add a config to disable the /logs endpoints (rkanter)

(cherry picked from commit e27fffdb90)
This commit is contained in:
Robert Kanter 2015-12-09 14:32:20 -08:00
parent 6451313560
commit 8dfaa7ded6
5 changed files with 104 additions and 2 deletions

View File

@ -11,6 +11,8 @@ Release 2.9.0 - UNRELEASED
HADOOP-12321. Make JvmPauseMonitor an AbstractService. HADOOP-12321. Make JvmPauseMonitor an AbstractService.
(Sunil G via Stevel) (Sunil G via Stevel)
HADOOP-12625. Add a config to disable the /logs endpoints (rkanter)
OPTIMIZATIONS OPTIMIZATIONS
BUG FIXES BUG FIXES

View File

@ -417,5 +417,11 @@ public class CommonConfigurationKeysPublic {
"hadoop.shell.safely.delete.limit.num.files"; "hadoop.shell.safely.delete.limit.num.files";
public static final long HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT = public static final long HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT =
100; 100;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_HTTP_LOGS_ENABLED =
"hadoop.http.logs.enabled";
/** Default value for HADOOP_HTTP_LOGS_ENABLED */
public static final boolean HADOOP_HTTP_LOGS_ENABLED_DEFAULT = true;
} }

View File

@ -539,9 +539,13 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) {
*/ */
protected void addDefaultApps(ContextHandlerCollection parent, protected void addDefaultApps(ContextHandlerCollection parent,
final String appDir, Configuration conf) throws IOException { final String appDir, Configuration conf) throws IOException {
// set up the context for "/logs/" if "hadoop.log.dir" property is defined. // set up the context for "/logs/" if "hadoop.log.dir" property is defined
// and it's enabled.
String logDir = System.getProperty("hadoop.log.dir"); String logDir = System.getProperty("hadoop.log.dir");
if (logDir != null) { boolean logsEnabled = conf.getBoolean(
CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED,
CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED_DEFAULT);
if (logDir != null && logsEnabled) {
Context logContext = new Context(parent, "/logs"); Context logContext = new Context(parent, "/logs");
logContext.setResourceBase(logDir); logContext.setResourceBase(logDir);
logContext.addServlet(AdminAuthorizedServlet.class, "/*"); logContext.addServlet(AdminAuthorizedServlet.class, "/*");

View File

@ -2058,4 +2058,14 @@ for ldap providers in the same way as above does.
<description>The class names of the Span Receivers to use for Hadoop. <description>The class names of the Span Receivers to use for Hadoop.
</description> </description>
</property> </property>
<property>
<description>
Enable the "/logs" endpoint on all Hadoop daemons, which serves local
logs, but may be considered a security risk since it lists the contents
of a directory.
</description>
<name>hadoop.http.logs.enabled</name>
<value>true</value>
</property>
</configuration> </configuration>

View File

@ -0,0 +1,80 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.http;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.http.resource.JerseyResource;
import org.apache.hadoop.net.NetUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * Tests that the "/logs" endpoint of {@link HttpServer2} can be toggled via
 * the {@code hadoop.http.logs.enabled} configuration key.
 */
public class TestHttpServerLogs extends HttpServerFunctionalTest {
  static final Log LOG = LogFactory.getLog(TestHttpServerLogs.class);
  private static HttpServer2 server;

  @BeforeClass
  public static void setup() throws Exception {
  }

  /**
   * (Re)starts the test HTTP server with the given configuration and updates
   * {@code baseUrl}. Stops any server left running by a previous test, so at
   * most one instance is alive at a time.
   */
  private void startServer(Configuration conf) throws Exception {
    // @AfterClass only stops the final server; without this, the server
    // started by an earlier test method would be leaked.
    if (server != null && server.isAlive()) {
      server.stop();
    }
    server = createTestServer(conf);
    server.addJerseyResourcePackage(
        JerseyResource.class.getPackage().getName(), "/jersey/*");
    server.start();
    baseUrl = getServerURL(server);
    LOG.info("HTTP server started: " + baseUrl);
  }

  @AfterClass
  public static void cleanup() throws Exception {
    if (server != null && server.isAlive()) {
      server.stop();
    }
  }

  /** "/logs" should respond 200 OK when explicitly enabled. */
  @Test
  public void testLogsEnabled() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(
        CommonConfigurationKeysPublic.HADOOP_HTTP_LOGS_ENABLED, true);
    startServer(conf);
    // Use baseUrl (set by startServer) rather than rebuilding the address,
    // for consistency with testLogsDisabled.
    URL url = new URL(baseUrl + "/logs");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    assertEquals(HttpStatus.SC_OK, conn.getResponseCode());
  }

  /** "/logs" should respond 404 Not Found when disabled. */
  @Test
  public void testLogsDisabled() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(
        CommonConfigurationKeysPublic.HADOOP_HTTP_LOGS_ENABLED, false);
    startServer(conf);
    URL url = new URL(baseUrl + "/logs");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    assertEquals(HttpStatus.SC_NOT_FOUND, conn.getResponseCode());
  }
}