From afc40e8a6682aa2f80c12c436ac991c60efd920b Mon Sep 17 00:00:00 2001 From: Suresh Srinivas Date: Tue, 28 Jan 2014 07:47:37 +0000 Subject: [PATCH] HADOOP-10255. Merge 1561959 and 1561961 from trunk. git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1561966 13f79535-47bb-0310-9956-ffa450edef68 --- .../hadoop-common/CHANGES.txt | 3 + .../org/apache/hadoop/conf/ConfServlet.java | 6 +- .../hadoop/http/AdminAuthorizedServlet.java | 2 +- .../{HttpServer.java => HttpServer2.java} | 249 +++++------------- .../org/apache/hadoop/jmx/JMXJsonServlet.java | 4 +- .../java/org/apache/hadoop/log/LogLevel.java | 4 +- .../apache/hadoop/metrics/MetricsServlet.java | 4 +- .../AuthenticationFilterInitializer.java | 4 +- .../hadoop/http/HttpServerFunctionalTest.java | 34 +-- .../apache/hadoop/http/TestGlobalFilter.java | 6 +- .../apache/hadoop/http/TestHtmlQuoting.java | 4 +- .../apache/hadoop/http/TestHttpServer.java | 65 ++--- .../hadoop/http/TestHttpServerLifecycle.java | 32 +-- .../hadoop/http/TestHttpServerWebapps.java | 4 +- .../apache/hadoop/http/TestPathFilter.java | 6 +- .../apache/hadoop/http/TestSSLHttpServer.java | 6 +- .../apache/hadoop/http/TestServletFilter.java | 14 +- .../apache/hadoop/jmx/TestJMXJsonServlet.java | 4 +- .../org/apache/hadoop/log/TestLogLevel.java | 4 +- .../security/TestAuthenticationFilter.java | 4 +- .../java/org/apache/hadoop/hdfs/DFSUtil.java | 8 +- .../server/JournalNodeHttpServer.java | 6 +- .../hadoop/hdfs/server/datanode/DataNode.java | 6 +- .../datanode/SecureDataNodeStarter.java | 4 +- .../hdfs/server/namenode/GetImageServlet.java | 4 +- .../server/namenode/NameNodeHttpServer.java | 18 +- .../server/namenode/SecondaryNameNode.java | 6 +- .../server/namenode/TestGetImageServlet.java | 4 +- .../server/namenode/TestTransferFsImage.java | 4 +- .../namenode/snapshot/SnapshotTestHelper.java | 4 +- .../mapreduce/v2/app/TestJobEndNotifier.java | 12 +- .../hadoop/mapred/TestJobEndNotifier.java | 6 +- .../org/apache/hadoop/yarn/webapp/WebApp.java | 8 +- .../apache/hadoop/yarn/webapp/WebApps.java | 8 +- .../yarn/server/webproxy/WebAppProxy.java | 6 +- .../webproxy/TestWebAppProxyServlet.java | 6 +- 36 files changed, 218 insertions(+), 351 deletions(-) rename hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/{HttpServer.java => HttpServer2.java} (86%) diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 03855e78231..c53dafc44ba 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -368,6 +368,9 @@ Release 2.3.0 - UNRELEASED HADOOP-9830. Fix typo at http://hadoop.apache.org/docs/current/ (Kousuke Saruta via Arpit Agarwal) + HADOOP-10255. Rename HttpServer to HttpServer2 to retain older + HttpServer in branch-2 for compatibility. 
(Haohui Mai via suresh) + Release 2.2.0 - 2013-10-13 INCOMPATIBLE CHANGES diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java index da39fa57b74..c7f11b38dbd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java @@ -27,7 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; /** * A servlet to print out the running configuration data. @@ -47,7 +47,7 @@ public class ConfServlet extends HttpServlet { */ private Configuration getConfFromContext() { Configuration conf = (Configuration)getServletContext().getAttribute( - HttpServer.CONF_CONTEXT_ATTRIBUTE); + HttpServer2.CONF_CONTEXT_ATTRIBUTE); assert conf != null; return conf; } @@ -56,7 +56,7 @@ private Configuration getConfFromContext() { public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), + if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(), request, response)) { return; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java index 9e318aec518..ef562b41e6e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java @@ -37,7 +37,7 @@ public class AdminAuthorizedServlet extends DefaultServlet { protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Do the authorization - if (HttpServer.hasAdministratorAccess(getServletContext(), request, + if (HttpServer2.hasAdministratorAccess(getServletContext(), request, response)) { // Authorization is done. Just call super. super.doGet(request, response); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java similarity index 86% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java rename to hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index a20db71c961..069411ba87c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -24,7 +24,6 @@ import java.net.BindException; import java.net.InetSocketAddress; import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; @@ -89,17 +88,19 @@ import com.sun.jersey.spi.container.servlet.ServletContainer; /** - * Create a Jetty embedded server to answer http requests. The primary goal - * is to serve up status information for the server. 
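The two hunks above show the pattern every servlet touched by this patch follows: static helpers on HttpServer2 gate access, and the server's Configuration travels through a servlet-context attribute. A minimal sketch of a servlet written against the renamed class (StatusServlet is a hypothetical name, not part of this patch):

    import java.io.IOException;
    import java.io.PrintWriter;

    import javax.servlet.ServletException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer2;

    public class StatusServlet extends HttpServlet {
      @Override
      public void doGet(HttpServletRequest request, HttpServletResponse response)
          throws ServletException, IOException {
        // Same guard as ConfServlet: refuse instrumentation access to users
        // who do not pass the configured admin check.
        if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(),
            request, response)) {
          return;
        }
        // HttpServer2 stores its Configuration in the servlet context under
        // CONF_CONTEXT_ATTRIBUTE, which is where ConfServlet reads it from.
        Configuration conf = (Configuration) getServletContext()
            .getAttribute(HttpServer2.CONF_CONTEXT_ATTRIBUTE);
        PrintWriter out = response.getWriter();
        out.println("loaded configuration entries: " + conf.size());
        out.close();
      }
    }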
- * There are three contexts: - * "/logs/" -> points to the log directory - * "/static/" -> points to common static files (src/webapps/static) - * "/" -> the jsp server code from (src/webapps/) + * Create a Jetty embedded server to answer http requests. The primary goal is + * to serve up status information for the server. There are three contexts: + * "/logs/" -> points to the log directory "/static/" -> points to common static + * files (src/webapps/static) "/" -> the jsp server code from + * (src/webapps/) + * + * This class is a fork of the old HttpServer. HttpServer exists for + * compatibility reasons. See HBASE-10336 for more details. */ -@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase"}) +@InterfaceAudience.Private @InterfaceStability.Evolving -public class HttpServer implements FilterContainer { - public static final Log LOG = LogFactory.getLog(HttpServer.class); +public final class HttpServer2 implements FilterContainer { + public static final Log LOG = LogFactory.getLog(HttpServer2.class); static final String FILTER_INITIALIZER_PROPERTY = "hadoop.http.filter.initializers"; @@ -166,11 +167,6 @@ public static class Builder { // The -keypass option in keytool private String keyPassword; - @Deprecated - private String bindAddress; - @Deprecated - private int port = -1; - private boolean findPort; private String hostName; @@ -204,7 +200,7 @@ public Builder hostName(String hostName) { this.hostName = hostName; return this; } - + public Builder trustStore(String location, String password, String type) { this.trustStore = location; this.trustStorePassword = password; @@ -233,78 +229,51 @@ public Builder needsClientAuth(boolean value) { return this; } - /** - * Use addEndpoint() instead. - */ - @Deprecated - public Builder setBindAddress(String bindAddress){ - this.bindAddress = bindAddress; - return this; - } - - /** - * Use addEndpoint() instead. - */ - @Deprecated - public Builder setPort(int port) { - this.port = port; - return this; - } - public Builder setFindPort(boolean findPort) { this.findPort = findPort; return this; } - + public Builder setConf(Configuration conf) { this.conf = conf; return this; } - + public Builder setConnector(Connector connector) { this.connector = connector; return this; } - + public Builder setPathSpec(String[] pathSpec) { this.pathSpecs = pathSpec; return this; } - + public Builder setACL(AccessControlList acl) { this.adminsAcl = acl; return this; } - + public Builder setSecurityEnabled(boolean securityEnabled) { this.securityEnabled = securityEnabled; return this; } - + public Builder setUsernameConfKey(String usernameConfKey) { this.usernameConfKey = usernameConfKey; return this; } - + public Builder setKeytabConfKey(String keytabConfKey) { this.keytabConfKey = keytabConfKey; return this; } - - public HttpServer build() throws IOException { + + public HttpServer2 build() throws IOException { if (this.name == null) { throw new HadoopIllegalArgumentException("name is not set"); } - // Make the behavior compatible with deprecated interfaces - if (bindAddress != null && port != -1) { - try { - endpoints.add(0, new URI("http", "", bindAddress, port, "", "", "")); - } catch (URISyntaxException e) { - throw new HadoopIllegalArgumentException("Invalid endpoint: "+ e); - } - } - if (endpoints.size() == 0 && connector == null) { throw new HadoopIllegalArgumentException("No endpoints specified"); } @@ -313,12 +282,12 @@ public HttpServer build() throws IOException { hostName = endpoints.size() == 0 ? 
connector.getHost() : endpoints.get( 0).getHost(); } - + if (this.conf == null) { conf = new Configuration(); } - - HttpServer server = new HttpServer(this); + + HttpServer2 server = new HttpServer2(this); if (this.securityEnabled) { server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey); @@ -332,7 +301,7 @@ public HttpServer build() throws IOException { Connector listener = null; String scheme = ep.getScheme(); if ("http".equals(scheme)) { - listener = HttpServer.createDefaultChannelConnector(); + listener = HttpServer2.createDefaultChannelConnector(); } else if ("https".equals(scheme)) { SslSocketConnector c = new SslSocketConnector(); c.setNeedClientAuth(needsClientAuth); @@ -363,105 +332,8 @@ public HttpServer build() throws IOException { return server; } } - - /** Same as this(name, bindAddress, port, findPort, null); */ - @Deprecated - public HttpServer(String name, String bindAddress, int port, boolean findPort - ) throws IOException { - this(name, bindAddress, port, findPort, new Configuration()); - } - @Deprecated - public HttpServer(String name, String bindAddress, int port, - boolean findPort, Configuration conf, Connector connector) throws IOException { - this(name, bindAddress, port, findPort, conf, null, connector, null); - } - - /** - * Create a status server on the given port. Allows you to specify the - * path specifications that this server will be serving so that they will be - * added to the filters properly. - * - * @param name The name of the server - * @param bindAddress The address for this server - * @param port The port to use on the server - * @param findPort whether the server should start at the given port and - * increment by 1 until it finds a free port. - * @param conf Configuration - * @param pathSpecs Path specifications that this httpserver will be serving. - * These will be added to any filters. - */ - @Deprecated - public HttpServer(String name, String bindAddress, int port, - boolean findPort, Configuration conf, String[] pathSpecs) throws IOException { - this(name, bindAddress, port, findPort, conf, null, null, pathSpecs); - } - - /** - * Create a status server on the given port. - * The jsp scripts are taken from src/webapps/. - * @param name The name of the server - * @param port The port to use on the server - * @param findPort whether the server should start at the given port and - * increment by 1 until it finds a free port. - * @param conf Configuration - */ - @Deprecated - public HttpServer(String name, String bindAddress, int port, - boolean findPort, Configuration conf) throws IOException { - this(name, bindAddress, port, findPort, conf, null, null, null); - } - - @Deprecated - public HttpServer(String name, String bindAddress, int port, - boolean findPort, Configuration conf, AccessControlList adminsAcl) - throws IOException { - this(name, bindAddress, port, findPort, conf, adminsAcl, null, null); - } - - /** - * Create a status server on the given port. - * The jsp scripts are taken from src/webapps/. - * @param name The name of the server - * @param bindAddress The address for this server - * @param port The port to use on the server - * @param findPort whether the server should start at the given port and - * increment by 1 until it finds a free port. 
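With the deprecated setBindAddress()/setPort() pair removed, callers describe listeners as URIs and build() creates a matching connector per scheme. A minimal sketch of the new construction path, assuming a "test" webapp resource on the classpath as the test suite provides:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer2;

    public class BuilderSketch {
      public static void main(String[] args) throws Exception {
        HttpServer2 server = new HttpServer2.Builder()
            .setName("test")                                // needs webapps/test on the classpath
            .addEndpoint(URI.create("http://localhost:0"))  // port 0 = ephemeral
            .setFindPort(true)                              // probe upward if the port is taken
            .setConf(new Configuration())
            .build();
        server.start();
        System.out.println("bound to port "
            + server.getConnectorAddress(0).getPort());
        server.stop();
      }
    }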
- * @param conf Configuration - * @param adminsAcl {@link AccessControlList} of the admins - */ - @Deprecated - public HttpServer(String name, String bindAddress, int port, - boolean findPort, Configuration conf, AccessControlList adminsAcl, - Connector connector) throws IOException { - this(name, bindAddress, port, findPort, conf, adminsAcl, connector, null); - } - - /** - * Create a status server on the given port. - * The jsp scripts are taken from src/webapps/. - * @param name The name of the server - * @param bindAddress The address for this server - * @param port The port to use on the server - * @param findPort whether the server should start at the given port and - * increment by 1 until it finds a free port. - * @param conf Configuration - * @param adminsAcl {@link AccessControlList} of the admins - * @param connector A jetty connection listener - * @param pathSpecs Path specifications that this httpserver will be serving. - * These will be added to any filters. - */ - @Deprecated - public HttpServer(String name, String bindAddress, int port, - boolean findPort, Configuration conf, AccessControlList adminsAcl, - Connector connector, String[] pathSpecs) throws IOException { - this(new Builder().setName(name).hostName(bindAddress) - .addEndpoint(URI.create("http://" + bindAddress + ":" + port)) - .setFindPort(findPort).setConf(conf).setACL(adminsAcl) - .setConnector(connector).setPathSpec(pathSpecs)); - } - - private HttpServer(final Builder b) throws IOException { + private HttpServer2(final Builder b) throws IOException { final String appDir = getWebAppsPath(b.name); this.webServer = new Server(); this.adminsAcl = b.adminsAcl; @@ -554,9 +426,9 @@ private static void addNoCacheFilter(WebAppContext ctxt) { * listener. */ public Connector createBaseListener(Configuration conf) throws IOException { - return HttpServer.createDefaultChannelConnector(); + return HttpServer2.createDefaultChannelConnector(); } - + @InterfaceAudience.Private public static Connector createDefaultChannelConnector() { SelectChannelConnector ret = new SelectChannelConnector(); @@ -567,7 +439,7 @@ public static Connector createDefaultChannelConnector() { if(Shell.WINDOWS) { // result of setting the SO_REUSEADDR flag is different on Windows // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx - // without this 2 NN's can start on the same machine and listen on + // without this 2 NN's can start on the same machine and listen on // the same port with indeterminate routing of incoming requests to them ret.setReuseAddress(false); } @@ -601,7 +473,7 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) { */ protected void addDefaultApps(ContextHandlerCollection parent, final String appDir, Configuration conf) throws IOException { - // set up the context for "/logs/" if "hadoop.log.dir" property is defined. + // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
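The Windows-specific SO_REUSEADDR handling above is easy to miss. Condensed, the default connector setup looks like the sketch below; only calls visible in this patch are shown, and the real method additionally tunes idle-time and buffer settings:

    import org.apache.hadoop.util.Shell;
    import org.mortbay.jetty.nio.SelectChannelConnector;

    public final class ConnectorDefaults {
      static SelectChannelConnector newConnector(String host, int port) {
        SelectChannelConnector conn = new SelectChannelConnector();
        if (Shell.WINDOWS) {
          // With SO_REUSEADDR on, two NameNodes on one Windows machine can
          // bind the same port with indeterminate routing of incoming
          // requests, so the flag is disabled there.
          conn.setReuseAddress(false);
        }
        conn.setHost(host);
        conn.setPort(port);
        return conn;
      }
    }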
String logDir = System.getProperty("hadoop.log.dir"); if (logDir != null) { Context logContext = new Context(parent, "/logs"); @@ -628,7 +500,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, setContextAttributes(staticContext, conf); defaultContexts.put(staticContext, true); } - + private void setContextAttributes(Context context, Configuration conf) { context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); @@ -654,10 +526,10 @@ public void addContext(Context ctxt, boolean isFiltered) } /** - * Add a context + * Add a context * @param pathSpec The path spec for the context * @param dir The directory containing the context - * @param isFiltered if true, the servlet is added to the filter path mapping + * @param isFiltered if true, the servlet is added to the filter path mapping * @throws IOException */ protected void addContext(String pathSpec, String dir, boolean isFiltered) throws IOException { @@ -680,7 +552,7 @@ public void setAttribute(String name, Object value) { webAppContext.setAttribute(name, value); } - /** + /** * Add a Jersey resource package. * @param packageName The Java package name containing the Jersey resource. * @param pathSpec The path spec for the servlet @@ -709,11 +581,11 @@ public void addServlet(String name, String pathSpec, } /** - * Add an internal servlet in the server. + * Add an internal servlet in the server. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For - * servlets added using this method, filters are not enabled. - * + * servlets added using this method, filters are not enabled. + * * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class @@ -725,18 +597,18 @@ public void addInternalServlet(String name, String pathSpec, /** * Add an internal servlet in the server, specifying whether or not to - * protect with Kerberos authentication. + * protect with Kerberos authentication. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For + * servlets added using this method, filters (except internal Kerberos - * filters) are not enabled. - * + * filters) are not enabled. + * * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class * @param requireAuth Require Kerberos authenticate to access servlet */ - public void addInternalServlet(String name, String pathSpec, + public void addInternalServlet(String name, String pathSpec, Class clazz, boolean requireAuth) { ServletHolder holder = new ServletHolder(clazz); if (name != null) { @@ -820,7 +692,7 @@ protected void addFilterPathMapping(String pathSpec, handler.addFilterMapping(fmap); } } - + /** * Get the value in the webapp context. 
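addServlet() and addInternalServlet() differ only in filtering, which is the distinction the javadoc above draws. A hypothetical registration helper makes the contrast concrete; the servlet classes are real ones from this patch, but pairing them like this is illustrative:

    import org.apache.hadoop.conf.ConfServlet;
    import org.apache.hadoop.http.HttpServer2;

    public class ServletWiring {
      static void register(HttpServer2 server) {
        // User-facing servlet: all configured filters apply to this path.
        server.addServlet("conf", "/conf", ConfServlet.class);
        // Internal servlet: filters are skipped; with requireAuth=true the
        // SPNEGO filter installed by initSpnego() still guards the path.
        server.addInternalServlet("stacks", "/stacks",
            HttpServer2.StackServlet.class, true);
      }
    }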
* @param name The name of the attribute @@ -829,7 +701,7 @@ protected void addFilterPathMapping(String pathSpec, public Object getAttribute(String name) { return webAppContext.getAttribute(name); } - + public WebAppContext getWebAppContext(){ return this.webAppContext; } @@ -842,7 +714,7 @@ public WebAppContext getWebAppContext(){ */ protected String getWebAppsPath(String appName) throws FileNotFoundException { URL url = getClass().getClassLoader().getResource("webapps/" + appName); - if (url == null) + if (url == null) throw new FileNotFoundException("webapps/" + appName + " not found in CLASSPATH"); String urlString = url.toString(); @@ -900,7 +772,7 @@ private void initSpnego(Configuration conf, String hostName, params.put("kerberos.keytab", httpKeytab); } params.put(AuthenticationFilter.AUTH_TYPE, "kerberos"); - + defineFilter(webAppContext, SPNEGO_FILTER, AuthenticationFilter.class.getName(), params, null); } @@ -939,6 +811,9 @@ public void start() throws IOException { } } catch (IOException e) { throw e; + } catch (InterruptedException e) { + throw (IOException) new InterruptedIOException( + "Interrupted while starting HTTP server").initCause(e); } catch (Exception e) { throw new IOException("Problem starting http server", e); } @@ -984,7 +859,7 @@ void openListeners() throws Exception { } } } - + /** * stop the server */ @@ -1102,7 +977,7 @@ public static boolean isInstrumentationAccessAllowed( /** * Does the user sending the HttpServletRequest has the administrator ACLs? If * it isn't the case, response will be modified to send an error to the user. - * + * * @param servletContext * @param request * @param response used to send the error response if user does not have admin access. @@ -1127,7 +1002,7 @@ public static boolean hasAdministratorAccess( "authorized to access this page."); return false; } - + if (servletContext.getAttribute(ADMINS_ACL) != null && !userHasAdministratorAccess(servletContext, remoteUser)) { response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User " @@ -1141,7 +1016,7 @@ public static boolean hasAdministratorAccess( /** * Get the admin ACLs from the given ServletContext and check if the given * user is in the ACL. - * + * * @param servletContext the context containing the admin ACL. * @param remoteUser the remote user to check for. * @return true if the user is present in the ACL, false if no ACL is set or @@ -1168,7 +1043,7 @@ public static class StackServlet extends HttpServlet { @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), + if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(), request, response)) { return; } @@ -1176,10 +1051,10 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) PrintWriter out = response.getWriter(); ReflectionUtils.printThreadInfo(out, ""); out.close(); - ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1); + ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1); } } - + /** * A Servlet input filter that quotes all HTML active characters in the * parameter names and values. The goal is to quote the characters to make @@ -1194,7 +1069,7 @@ public RequestQuoter(HttpServletRequest rawRequest) { super(rawRequest); this.rawRequest = rawRequest; } - + /** * Return the set of parameter names, quoting each name. */ @@ -1215,7 +1090,7 @@ public String nextElement() { } }; } - + /** * Unquote the name and quote the value. 
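RequestQuoter above is what keeps raw HTML metacharacters out of parameter values; TestHtmlQuoting (further down in this patch) drives it with a mocked request, roughly like this sketch, where the parameter name and value are made up:

    import javax.servlet.http.HttpServletRequest;

    import org.apache.hadoop.http.HttpServer2;
    import org.mockito.Mockito;

    public class QuoterSketch {
      public static void main(String[] args) {
        HttpServletRequest raw = Mockito.mock(HttpServletRequest.class);
        Mockito.doReturn("a<b").when(raw).getParameter("x");

        HttpServer2.QuotingInputFilter.RequestQuoter quoter =
            new HttpServer2.QuotingInputFilter.RequestQuoter(raw);
        // The wrapper unquotes the requested name, then HTML-quotes the
        // value on the way out: "a<b" is returned as "a&lt;b".
        System.out.println(quoter.getParameter("x"));
      }
    }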
*/ @@ -1224,7 +1099,7 @@ public String getParameter(String name) { return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter (HtmlQuoting.unquoteHtmlChars(name))); } - + @Override public String[] getParameterValues(String name) { String unquoteName = HtmlQuoting.unquoteHtmlChars(name); @@ -1254,7 +1129,7 @@ public Map getParameterMap() { } return result; } - + /** * Quote the url so that users specifying the HOST HTTP header * can't inject attacks. @@ -1264,7 +1139,7 @@ public StringBuffer getRequestURL(){ String url = rawRequest.getRequestURL().toString(); return new StringBuffer(HtmlQuoting.quoteHtmlChars(url)); } - + /** * Quote the server name so that users specifying the HOST HTTP header * can't inject attacks. @@ -1285,11 +1160,11 @@ public void destroy() { } @Override - public void doFilter(ServletRequest request, + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain ) throws IOException, ServletException { - HttpServletRequestWrapper quoted = + HttpServletRequestWrapper quoted = new RequestQuoter((HttpServletRequest) request); HttpServletResponse httpResponse = (HttpServletResponse) response; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index 3b605e01a9e..33ee9f90de7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -46,7 +46,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerator; @@ -153,7 +153,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) { String jsonpcb = null; PrintWriter writer = null; try { - if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), + if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(), request, response)) { return; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java index 5e2ed532556..77f74cc4049 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java @@ -28,7 +28,7 @@ import org.apache.commons.logging.impl.*; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.util.ServletUtil; /** @@ -93,7 +93,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { // Do the authorization - if (!HttpServer.hasAdministratorAccess(getServletContext(), request, + if (!HttpServer2.hasAdministratorAccess(getServletContext(), request, response)) { return; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java index 9d7e1e54b73..8f5dcd1bc0b 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java @@ -32,7 +32,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.metrics.spi.OutputRecord; import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap; import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap; @@ -106,7 +106,7 @@ Map>> makeMap( public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), + if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(), request, response)) { return; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java index 4d23dbe76fd..4fb9e456141 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.security; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; @@ -94,7 +94,7 @@ public void initFilter(FilterContainer container, Configuration conf) { } //Resolve _HOST into bind address - String bindAddress = conf.get(HttpServer.BIND_ADDRESS); + String bindAddress = conf.get(HttpServer2.BIND_ADDRESS); String principal = filterConfig.get(KerberosAuthenticationHandler.PRINCIPAL); if (principal != null) { try { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java index e03059a3470..ecf2d0f4446 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java @@ -23,7 +23,7 @@ import org.apache.hadoop.security.authorize.AccessControlList; import org.junit.Assert; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.http.HttpServer.Builder; +import org.apache.hadoop.http.HttpServer2.Builder; import java.io.File; import java.io.IOException; @@ -33,7 +33,7 @@ import java.net.MalformedURLException; /** - * This is a base class for functional tests of the {@link HttpServer}. + * This is a base class for functional tests of the {@link HttpServer2}. * The methods are static for other classes to import statically. 
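HttpServerFunctionalTest's static helpers pair every created server with an explicit stop. A hypothetical smoke test showing the intended shape (the test class and method names are invented):

    import java.net.URL;

    import org.apache.hadoop.http.HttpServer2;
    import org.apache.hadoop.http.HttpServerFunctionalTest;
    import org.junit.Test;

    public class TestServerSmoke extends HttpServerFunctionalTest {
      @Test
      public void testServerComesUp() throws Exception {
        HttpServer2 server = createAndStartTestServer();
        try {
          assertTrue("server should be live", server.isAlive());
          URL base = getServerURL(server);   // http://<host>:<bound port>
          assertNotNull(base);
        } finally {
          stop(server);                      // null-safe per the helper
        }
      }
    }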
*/ public class HttpServerFunctionalTest extends Assert { @@ -54,7 +54,7 @@ public class HttpServerFunctionalTest extends Assert { * @throws IOException if a problem occurs * @throws AssertionError if a condition was not met */ - public static HttpServer createTestServer() throws IOException { + public static HttpServer2 createTestServer() throws IOException { prepareTestWebapp(); return createServer(TEST); } @@ -68,13 +68,13 @@ public static HttpServer createTestServer() throws IOException { * @throws IOException if a problem occurs * @throws AssertionError if a condition was not met */ - public static HttpServer createTestServer(Configuration conf) + public static HttpServer2 createTestServer(Configuration conf) throws IOException { prepareTestWebapp(); return createServer(TEST, conf); } - public static HttpServer createTestServer(Configuration conf, AccessControlList adminsAcl) + public static HttpServer2 createTestServer(Configuration conf, AccessControlList adminsAcl) throws IOException { prepareTestWebapp(); return createServer(TEST, conf, adminsAcl); @@ -89,7 +89,7 @@ public static HttpServer createTestServer(Configuration conf, AccessControlList * @throws IOException if a problem occurs * @throws AssertionError if a condition was not met */ - public static HttpServer createTestServer(Configuration conf, + public static HttpServer2 createTestServer(Configuration conf, String[] pathSpecs) throws IOException { prepareTestWebapp(); return createServer(TEST, conf, pathSpecs); @@ -120,10 +120,10 @@ protected static void prepareTestWebapp() { * @return the server * @throws IOException if it could not be created */ - public static HttpServer createServer(String host, int port) + public static HttpServer2 createServer(String host, int port) throws IOException { prepareTestWebapp(); - return new HttpServer.Builder().setName(TEST) + return new HttpServer2.Builder().setName(TEST) .addEndpoint(URI.create("http://" + host + ":" + port)) .setFindPort(true).build(); } @@ -134,7 +134,7 @@ public static HttpServer createServer(String host, int port) * @return the server * @throws IOException if it could not be created */ - public static HttpServer createServer(String webapp) throws IOException { + public static HttpServer2 createServer(String webapp) throws IOException { return localServerBuilder(webapp).setFindPort(true).build(); } /** @@ -144,18 +144,18 @@ public static HttpServer createServer(String webapp) throws IOException { * @return the server * @throws IOException if it could not be created */ - public static HttpServer createServer(String webapp, Configuration conf) + public static HttpServer2 createServer(String webapp, Configuration conf) throws IOException { return localServerBuilder(webapp).setFindPort(true).setConf(conf).build(); } - public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl) + public static HttpServer2 createServer(String webapp, Configuration conf, AccessControlList adminsAcl) throws IOException { return localServerBuilder(webapp).setFindPort(true).setConf(conf).setACL(adminsAcl).build(); } private static Builder localServerBuilder(String webapp) { - return new HttpServer.Builder().setName(webapp).addEndpoint( + return new HttpServer2.Builder().setName(webapp).addEndpoint( URI.create("http://localhost:0")); } @@ -167,7 +167,7 @@ private static Builder localServerBuilder(String webapp) { * @return the server * @throws IOException if it could not be created */ - public static HttpServer createServer(String webapp, 
Configuration conf, + public static HttpServer2 createServer(String webapp, Configuration conf, String[] pathSpecs) throws IOException { return localServerBuilder(webapp).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build(); } @@ -180,8 +180,8 @@ public static HttpServer createServer(String webapp, Configuration conf, * @throws IOException on any failure * @throws AssertionError if a condition was not met */ - public static HttpServer createAndStartTestServer() throws IOException { - HttpServer server = createTestServer(); + public static HttpServer2 createAndStartTestServer() throws IOException { + HttpServer2 server = createTestServer(); server.start(); return server; } @@ -191,7 +191,7 @@ public static HttpServer createAndStartTestServer() throws IOException { * @param server to stop * @throws Exception on any failure */ - public static void stop(HttpServer server) throws Exception { + public static void stop(HttpServer2 server) throws Exception { if (server != null) { server.stop(); } @@ -203,7 +203,7 @@ public static void stop(HttpServer server) throws Exception { * @return a URL bonded to the base of the server * @throws MalformedURLException if the URL cannot be created. */ - public static URL getServerURL(HttpServer server) + public static URL getServerURL(HttpServer2 server) throws MalformedURLException { assertNotNull("No server", server); return new URL("http://" diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java index 70db923284c..0e4a1caeef9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java @@ -40,7 +40,7 @@ import org.junit.Test; public class TestGlobalFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer.class); + static final Log LOG = LogFactory.getLog(HttpServer2.class); static final Set RECORDS = new TreeSet(); /** A very simple filter that records accessed uri's */ @@ -106,9 +106,9 @@ public void testServletFilter() throws Exception { Configuration conf = new Configuration(); //start a http server with CountingFilter - conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY, + conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY, RecordingFilter.Initializer.class.getName()); - HttpServer http = createTestServer(conf); + HttpServer2 http = createTestServer(conf); http.start(); final String fsckURL = "/fsck"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java index 9fc53a3b6fb..775754d9f87 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java @@ -68,8 +68,8 @@ private void runRoundTrip(String str) throws Exception { @Test public void testRequestQuoting() throws Exception { HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class); - HttpServer.QuotingInputFilter.RequestQuoter quoter = - new HttpServer.QuotingInputFilter.RequestQuoter(mockReq); + HttpServer2.QuotingInputFilter.RequestQuoter quoter = + new HttpServer2.QuotingInputFilter.RequestQuoter(mockReq); Mockito.doReturn("aany())).thenReturn(false); - 
Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls); - Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response)); + Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls); + Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response)); Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString()); //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs response = Mockito.mock(HttpServletResponse.class); Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(true); - Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls); - Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, response)); + Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls); + Assert.assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); } @@ -508,38 +508,27 @@ public void testHasAdministratorAccess() throws Exception { public void testRequiresAuthorizationAccess() throws Exception { Configuration conf = new Configuration(); ServletContext context = Mockito.mock(ServletContext.class); - Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf); + Mockito.when(context.getAttribute(HttpServer2.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf); HttpServletRequest request = Mockito.mock(HttpServletRequest.class); HttpServletResponse response = Mockito.mock(HttpServletResponse.class); //requires admin access to instrumentation, FALSE by default - Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response)); + Assert.assertTrue(HttpServer2.isInstrumentationAccessAllowed(context, request, response)); //requires admin access to instrumentation, TRUE conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true); conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true); AccessControlList acls = Mockito.mock(AccessControlList.class); Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(false); - Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls); - Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response)); - } - - @Test - @SuppressWarnings("deprecation") - public void testOldConstructor() throws Exception { - HttpServer server = new HttpServer("test", "0.0.0.0", 0, false); - try { - server.start(); - } finally { - server.stop(); - } + Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls); + Assert.assertFalse(HttpServer2.isInstrumentationAccessAllowed(context, request, response)); } @Test public void testBindAddress() throws Exception { checkBindAddress("localhost", 0, false).stop(); // hang onto this one for a bit more testing - HttpServer myServer = checkBindAddress("localhost", 0, false); - HttpServer myServer2 = null; + HttpServer2 myServer = checkBindAddress("localhost", 0, false); + HttpServer2 myServer2 = null; try { int port = myServer.getConnectorAddress(0).getPort(); // it's already in use, true = expect a higher port @@ -558,9 +547,9 @@ public void testOldConstructor() throws Exception { } } - private HttpServer checkBindAddress(String host, int port, boolean findPort) + private HttpServer2 checkBindAddress(String host, int port, boolean findPort) throws Exception { - HttpServer server = createServer(host, port); + HttpServer2 server = createServer(host, port); try { // not bound, ephemeral should return requested 
port (0 for ephemeral) List listeners = (List) Whitebox.getInternalState(server, @@ -608,7 +597,7 @@ public void testNoCacheHeader() throws Exception { public void testHttpServerBuilderWithExternalConnector() throws Exception { Connector c = mock(Connector.class); doReturn("localhost").when(c).getHost(); - HttpServer s = new HttpServer.Builder().setName("test").setConnector(c) + HttpServer2 s = new HttpServer2.Builder().setName("test").setConnector(c) .build(); s.stop(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java index d7330e87140..edae3c25cb2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java @@ -23,18 +23,18 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { /** - * Check that a server is alive by probing the {@link HttpServer#isAlive()} method + * Check that a server is alive by probing the {@link HttpServer2#isAlive()} method * and the text of its toString() description * @param server server */ - private void assertAlive(HttpServer server) { + private void assertAlive(HttpServer2 server) { assertTrue("Server is not alive", server.isAlive()); - assertToStringContains(server, HttpServer.STATE_DESCRIPTION_ALIVE); + assertToStringContains(server, HttpServer2.STATE_DESCRIPTION_ALIVE); } - private void assertNotLive(HttpServer server) { + private void assertNotLive(HttpServer2 server) { assertTrue("Server should not be live", !server.isAlive()); - assertToStringContains(server, HttpServer.STATE_DESCRIPTION_NOT_LIVE); + assertToStringContains(server, HttpServer2.STATE_DESCRIPTION_NOT_LIVE); } /** @@ -43,12 +43,12 @@ private void assertNotLive(HttpServer server) { * @throws Throwable on failure */ @Test public void testCreatedServerIsNotAlive() throws Throwable { - HttpServer server = createTestServer(); + HttpServer2 server = createTestServer(); assertNotLive(server); } @Test public void testStopUnstartedServer() throws Throwable { - HttpServer server = createTestServer(); + HttpServer2 server = createTestServer(); stop(server); } @@ -59,7 +59,7 @@ private void assertNotLive(HttpServer server) { */ @Test public void testStartedServerIsAlive() throws Throwable { - HttpServer server = null; + HttpServer2 server = null; server = createTestServer(); assertNotLive(server); server.start(); @@ -78,22 +78,22 @@ public void testStartedServerWithRequestLog() throws Throwable { requestLogAppender.setName("httprequestlog"); requestLogAppender.setFilename(System.getProperty("test.build.data", "/tmp/") + "jetty-name-yyyy_mm_dd.log"); - Logger.getLogger(HttpServer.class.getName() + ".test").addAppender(requestLogAppender); - HttpServer server = null; + Logger.getLogger(HttpServer2.class.getName() + ".test").addAppender(requestLogAppender); + HttpServer2 server = null; server = createTestServer(); assertNotLive(server); server.start(); assertAlive(server); stop(server); - Logger.getLogger(HttpServer.class.getName() + ".test").removeAppender(requestLogAppender); + Logger.getLogger(HttpServer2.class.getName() + ".test").removeAppender(requestLogAppender); } /** - * Assert that the result of {@link HttpServer#toString()} contains the specific text + * Assert that the result of {@link HttpServer2#toString()} contains the specific text * @param 
server server to examine * @param text text to search for */ - private void assertToStringContains(HttpServer server, String text) { + private void assertToStringContains(HttpServer2 server, String text) { String description = server.toString(); assertTrue("Did not find \"" + text + "\" in \"" + description + "\"", description.contains(text)); @@ -105,7 +105,7 @@ private void assertToStringContains(HttpServer server, String text) { * @throws Throwable on failure */ @Test public void testStoppedServerIsNotAlive() throws Throwable { - HttpServer server = createAndStartTestServer(); + HttpServer2 server = createAndStartTestServer(); assertAlive(server); stop(server); assertNotLive(server); @@ -117,7 +117,7 @@ private void assertToStringContains(HttpServer server, String text) { * @throws Throwable on failure */ @Test public void testStoppingTwiceServerIsAllowed() throws Throwable { - HttpServer server = createAndStartTestServer(); + HttpServer2 server = createAndStartTestServer(); assertAlive(server); stop(server); assertNotLive(server); @@ -133,7 +133,7 @@ private void assertToStringContains(HttpServer server, String text) { */ @Test public void testWepAppContextAfterServerStop() throws Throwable { - HttpServer server = null; + HttpServer2 server = null; String key = "test.attribute.key"; String value = "test.attribute.value"; server = createTestServer(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java index c3ae6cef2f8..c92944e57ff 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java @@ -36,7 +36,7 @@ public class TestHttpServerWebapps extends HttpServerFunctionalTest { */ @Test public void testValidServerResource() throws Throwable { - HttpServer server = null; + HttpServer2 server = null; try { server = createServer("test"); } finally { @@ -51,7 +51,7 @@ public void testValidServerResource() throws Throwable { @Test public void testMissingServerResource() throws Throwable { try { - HttpServer server = createServer("NoSuchWebapp"); + HttpServer2 server = createServer("NoSuchWebapp"); //should not have got here. 
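TestGlobalFilter, TestPathFilter and TestServletFilter (before and after this point) all wire their filters the same way: a FilterInitializer subclass is named in hadoop.http.filter.initializers, and HttpServer2 invokes its initFilter() during construction. A condensed sketch of that pattern, with TracingFilter invented for illustration:

    import java.io.IOException;

    import javax.servlet.Filter;
    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;
    import javax.servlet.ServletRequest;
    import javax.servlet.ServletResponse;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.FilterContainer;
    import org.apache.hadoop.http.FilterInitializer;

    public class TracingFilter implements Filter {
      @Override public void init(FilterConfig config) {}
      @Override public void destroy() {}

      @Override
      public void doFilter(ServletRequest request, ServletResponse response,
          FilterChain chain) throws IOException, ServletException {
        System.out.println("request from: " + request.getRemoteAddr());
        chain.doFilter(request, response);
      }

      // Registered against every context, like the recording filters the
      // tests install.
      public static class Initializer extends FilterInitializer {
        @Override
        public void initFilter(FilterContainer container, Configuration conf) {
          container.addGlobalFilter("tracing",
              TracingFilter.class.getName(), null);
        }
      }
    }

    // Hooked up the same way the tests do it:
    //   conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
    //       TracingFilter.Initializer.class.getName());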
//close the server String serverDescription = server.toString(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java index be5900e64cd..09f31dff7b3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java @@ -40,7 +40,7 @@ import org.junit.Test; public class TestPathFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer.class); + static final Log LOG = LogFactory.getLog(HttpServer2.class); static final Set RECORDS = new TreeSet(); /** A very simple filter that records accessed uri's */ @@ -107,10 +107,10 @@ public void testPathSpecFilters() throws Exception { Configuration conf = new Configuration(); //start a http server with CountingFilter - conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY, + conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY, RecordingFilter.Initializer.class.getName()); String[] pathSpecs = { "/path", "/path/*" }; - HttpServer http = createTestServer(conf, pathSpecs); + HttpServer2 http = createTestServer(conf, pathSpecs); http.start(); final String baseURL = "/path"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java index cb4b66b4c31..3d5d8b63f97 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java @@ -48,7 +48,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest { private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class); private static Configuration conf; - private static HttpServer server; + private static HttpServer2 server; private static URL baseUrl; private static String keystoresDir; private static String sslConfDir; @@ -57,7 +57,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest { @BeforeClass public static void setup() throws Exception { conf = new Configuration(); - conf.setInt(HttpServer.HTTP_MAX_THREADS, 10); + conf.setInt(HttpServer2.HTTP_MAX_THREADS, 10); File base = new File(BASEDIR); FileUtil.fullyDelete(base); @@ -73,7 +73,7 @@ public static void setup() throws Exception { clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf); clientSslFactory.init(); - server = new HttpServer.Builder() + server = new HttpServer2.Builder() .setName("test") .addEndpoint(new URI("https://localhost")) .setConf(conf) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index 66113f500f1..6b17ccc84e2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -40,7 +40,7 @@ import org.junit.Test; public class TestServletFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer.class); + static final Log LOG = LogFactory.getLog(HttpServer2.class); static volatile String uri = null; /** A very simple filter which record the uri 
filtered. */ @@ -105,9 +105,9 @@ public void testServletFilter() throws Exception { Configuration conf = new Configuration(); //start a http server with CountingFilter - conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY, + conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY, SimpleFilter.Initializer.class.getName()); - HttpServer http = createTestServer(conf); + HttpServer2 http = createTestServer(conf); http.start(); final String fsckURL = "/fsck"; @@ -166,9 +166,9 @@ public void initFilter(FilterContainer container, Configuration conf) { public void testServletFilterWhenInitThrowsException() throws Exception { Configuration conf = new Configuration(); // start a http server with CountingFilter - conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY, + conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY, ErrorFilter.Initializer.class.getName()); - HttpServer http = createTestServer(conf); + HttpServer2 http = createTestServer(conf); try { http.start(); fail("expecting exception"); @@ -186,8 +186,8 @@ public void testServletFilterWhenInitThrowsException() throws Exception { public void testContextSpecificServletFilterWhenInitThrowsException() throws Exception { Configuration conf = new Configuration(); - HttpServer http = createTestServer(conf); - HttpServer.defineFilter(http.webAppContext, + HttpServer2 http = createTestServer(conf); + HttpServer2.defineFilter(http.webAppContext, "ErrorFilter", ErrorFilter.class.getName(), null, null); try { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java index a03eba92c39..978e9077488 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java @@ -24,7 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.http.HttpServerFunctionalTest; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -32,7 +32,7 @@ public class TestJMXJsonServlet extends HttpServerFunctionalTest { private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class); - private static HttpServer server; + private static HttpServer2 server; private static URL baseUrl; @BeforeClass public static void setup() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java index c9b2a09f513..0f0a7c33428 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java @@ -20,7 +20,7 @@ import java.io.*; import java.net.*; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.net.NetUtils; import junit.framework.TestCase; @@ -44,7 +44,7 @@ public void testDynamicLogLevel() throws Exception { log.error("log.error1"); assertTrue(!Level.ERROR.equals(log.getEffectiveLevel())); - HttpServer server = new HttpServer.Builder().setName("..") + HttpServer2 server = new HttpServer2.Builder().setName("..") .addEndpoint(new URI("http://localhost:0")).setFindPort(true) .build(); diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java index d8138817e1f..b6aae0eb637 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java @@ -18,7 +18,7 @@ import junit.framework.TestCase; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; @@ -49,7 +49,7 @@ public void testConfiguration() throws Exception { AuthenticationFilterInitializer.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath()); - conf.set(HttpServer.BIND_ADDRESS, "barhost"); + conf.set(HttpServer2.BIND_ADDRESS, "barhost"); FilterContainer container = Mockito.mock(FilterContainer.class); Mockito.doAnswer( diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java index 8e7a5e14629..6dc4aab6b91 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java @@ -85,7 +85,7 @@ import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.http.HttpConfig; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; @@ -1526,7 +1526,7 @@ public static HttpConfig.Policy getHttpPolicy(Configuration conf) { return policy; } - public static HttpServer.Builder loadSslConfToHttpServerBuilder(HttpServer.Builder builder, + public static HttpServer2.Builder loadSslConfToHttpServerBuilder(HttpServer2.Builder builder, Configuration sslConf) { return builder .needsClientAuth( @@ -1562,13 +1562,13 @@ public static Configuration loadSslConfiguration(Configuration conf) { * namenode can use to initialize their HTTP / HTTPS server. 
* */ - public static HttpServer.Builder httpServerTemplateForNNAndJN( + public static HttpServer2.Builder httpServerTemplateForNNAndJN( Configuration conf, final InetSocketAddress httpAddr, final InetSocketAddress httpsAddr, String name, String spnegoUserNameKey, String spnegoKeytabFileKey) throws IOException { HttpConfig.Policy policy = getHttpPolicy(conf); - HttpServer.Builder builder = new HttpServer.Builder().setName(name) + HttpServer2.Builder builder = new HttpServer2.Builder().setName(name) .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " "))) .setSecurityEnabled(UserGroupInformation.isSecurityEnabled()) .setUsernameConfKey(spnegoUserNameKey) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java index 4a7b95cca09..f58de600adb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.server.common.JspHelper; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.net.NetUtils; /** @@ -38,7 +38,7 @@ public class JournalNodeHttpServer { public static final String JN_ATTRIBUTE_KEY = "localjournal"; - private HttpServer httpServer; + private HttpServer2 httpServer; private JournalNode localJournalNode; private final Configuration conf; @@ -56,7 +56,7 @@ void start() throws IOException { DFSConfigKeys.DFS_JOURNALNODE_HTTPS_ADDRESS_DEFAULT); InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString); - HttpServer.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf, + HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf, httpAddr, httpsAddr, "journal", DFSConfigKeys.DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java index 73273ce25db..a32554750f5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java @@ -121,7 +121,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.Param; import org.apache.hadoop.http.HttpConfig; -import org.apache.hadoop.http.HttpServer; +import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.ReadaheadPool; import org.apache.hadoop.io.nativeio.NativeIO; @@ -236,7 +236,7 @@ public static InetSocketAddress createSocketAddr(String target) { private volatile boolean heartbeatsDisabledForTests = false; private DataStorage storage = null; - private HttpServer infoServer = null; + private HttpServer2 infoServer = null; private int infoPort; private int infoSecurePort; @@ -359,7 +359,7 @@ private static String getHostName(Configuration config) * Http Policy is decided. 
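JournalNodeHttpServer.start() above is the canonical consumer of httpServerTemplateForNNAndJN(); stripped of error handling it reduces to the sketch below. The fixed addresses are illustrative defaults, not values taken from this patch:

    import java.io.IOException;
    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.http.HttpServer2;
    import org.apache.hadoop.net.NetUtils;

    public class JournalHttpSketch {
      static HttpServer2 startHttpServer(Configuration conf) throws IOException {
        InetSocketAddress httpAddr = NetUtils.createSocketAddr("0.0.0.0:8480");
        InetSocketAddress httpsAddr = NetUtils.createSocketAddr("0.0.0.0:8481");
        // The template wires in the admin ACL, the security flag and the
        // SPNEGO keys, and adds http and/or https endpoints according to
        // the configured HTTP policy.
        HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(
            conf, httpAddr, httpsAddr, "journal",
            DFSConfigKeys.DFS_JOURNALNODE_INTERNAL_SPNEGO_USER_NAME_KEY,
            DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY);
        HttpServer2 httpServer = builder.build();
        httpServer.start();
        return httpServer;
      }
    }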
    */
   private void startInfoServer(Configuration conf) throws IOException {
-    HttpServer.Builder builder = new HttpServer.Builder().setName("datanode")
+    HttpServer2.Builder builder = new HttpServer2.Builder().setName("datanode")
         .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")));
     HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
index b6dc7fedbe2..000f7aedd2d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.mortbay.jetty.Connector;
@@ -119,7 +119,7 @@ public static SecureResources getSecureResources(Configuration conf)
     // certificates if they are communicating through SSL.
     Connector listener = null;
     if (policy.isHttpEnabled()) {
-      listener = HttpServer.createDefaultChannelConnector();
+      listener = HttpServer2.createDefaultChannelConnector();
       InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
       listener.setHost(infoSocAddr.getHostName());
       listener.setPort(infoSocAddr.getPort());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
index 7ac69f80848..93967f0d27c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
@@ -47,7 +47,7 @@
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.util.DataTransferThrottler;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -287,7 +287,7 @@ static boolean isValidRequestor(ServletContext context, String remoteUser,
       }
     }
-    if (HttpServer.userHasAdministratorAccess(context, remoteUser)) {
+    if (HttpServer2.userHasAdministratorAccess(context, remoteUser)) {
       LOG.info("GetImageServlet allowing administrator: " + remoteUser);
       return true;
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index c6407ffe984..fbeb141a5a3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -37,7 +37,7 @@
 import org.apache.hadoop.hdfs.web.resources.Param;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -47,7 +47,7 @@
  */
 @InterfaceAudience.Private
 public class NameNodeHttpServer {
-  private HttpServer httpServer;
+  private HttpServer2 httpServer;
   private final Configuration conf;
   private final NameNode nn;
@@ -68,7 +68,7 @@ public class NameNodeHttpServer {
   }
   private void initWebHdfs(Configuration conf) throws IOException {
-    if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
+    if (WebHdfsFileSystem.isEnabled(conf, HttpServer2.LOG)) {
       // set user pattern based on configuration file
       UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
       //add SPNEGO authentication filter for webhdfs
@@ -76,9 +76,9 @@ private void initWebHdfs(Configuration conf) throws IOException {
       final String classname = AuthFilter.class.getName();
       final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
       Map<String, String> params = getAuthFilterParams(conf);
-      HttpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params,
+      HttpServer2.defineFilter(httpServer.getWebAppContext(), name, classname, params,
           new String[]{pathSpec});
-      HttpServer.LOG.info("Added filter '" + name + "' (class=" + classname + ")");
+      HttpServer2.LOG.info("Added filter '" + name + "' (class=" + classname + ")");
       // add webhdfs packages
       httpServer.addJerseyResourcePackage(
@@ -102,7 +102,7 @@ void start() throws IOException {
         DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
     InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
-    HttpServer.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
+    HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
         httpAddr, httpsAddr, "hdfs",
         DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
         DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
@@ -151,7 +151,7 @@ private Map<String, String> getAuthFilterParams(Configuration conf)
           SecurityUtil.getServerPrincipal(principalInConf,
               bindAddress.getHostName()));
     } else if (UserGroupInformation.isSecurityEnabled()) {
-      HttpServer.LOG.error(
+      HttpServer2.LOG.error(
           "WebHDFS and security are enabled, but configuration property '" +
           DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY +
           "' is not set.");
@@ -163,7 +163,7 @@ private Map<String, String> getAuthFilterParams(Configuration conf)
           DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
           httpKeytab);
     } else if (UserGroupInformation.isSecurityEnabled()) {
-      HttpServer.LOG.error(
+      HttpServer2.LOG.error(
          "WebHDFS and security are enabled, but configuration property '" +
          DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY +
          "' is not set.");
@@ -213,7 +213,7 @@ void setStartupProgress(StartupProgress prog) {
     httpServer.setAttribute(STARTUP_PROGRESS_ATTRIBUTE_KEY, prog);
   }
-  private static void setupServlets(HttpServer httpServer, Configuration conf) {
+  private static void setupServlets(HttpServer2 httpServer, Configuration conf) {
     httpServer.addInternalServlet("startupProgress",
         StartupProgressServlet.PATH_SPEC, StartupProgressServlet.class);
     httpServer.addInternalServlet("getDelegationToken",
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index a593afc570d..f7592a8975b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -65,7 +65,7 @@
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -113,7 +113,7 @@ public class SecondaryNameNode implements Runnable {
   private Configuration conf;
   private InetSocketAddress nameNodeAddr;
   private volatile boolean shouldRun;
-  private HttpServer infoServer;
+  private HttpServer2 infoServer;
   private URL imageListenURL;
   private Collection<URI> checkpointDirs;
@@ -257,7 +257,7 @@ private void initialize(final Configuration conf,
         DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_DEFAULT);
     InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
-    HttpServer.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
+    HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
         httpAddr, httpsAddr, "secondary",
         DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
         DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java
index d040278c5ac..bffa54f6f29 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authorize.AccessControlList;
@@ -66,7 +66,7 @@ public void testIsValidRequestor() throws IOException {
     AccessControlList acls = Mockito.mock(AccessControlList.class);
     Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(false);
     ServletContext context = Mockito.mock(ServletContext.class);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+    Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls);
     // Make sure that NN2 is considered a valid fsimage/edits requestor.
     assertTrue(GetImageServlet.isValidRequestor(context,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java
index 3a5a8457a34..14d4441b4c2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java
@@ -37,7 +37,7 @@
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.http.HttpServerFunctionalTest;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.util.StringUtils;
@@ -119,7 +119,7 @@ public void testClientSideExceptionOnJustOneDir() throws IOException {
    */
   @Test(timeout = 5000)
   public void testImageTransferTimeout() throws Exception {
-    HttpServer testServer = HttpServerFunctionalTest.createServer("hdfs");
+    HttpServer2 testServer = HttpServerFunctionalTest.createServer("hdfs");
     try {
       testServer.addServlet("GetImage", "/getimage", TestGetImageServlet.class);
       testServer.start();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
index 146065d39bd..99adda2d936 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
@@ -58,7 +58,7 @@
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.Server;
 import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -89,7 +89,7 @@ public static void disableLogs() {
     setLevel2OFF(LogFactory.getLog(MetricsSystemImpl.class));
     setLevel2OFF(DataBlockScanner.LOG);
-    setLevel2OFF(HttpServer.LOG);
+    setLevel2OFF(HttpServer2.LOG);
     setLevel2OFF(DataNode.LOG);
     setLevel2OFF(BlockPoolSliceStorage.LOG);
     setLevel2OFF(LeaseManager.LOG);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
index 2a62bde29ca..8f9271f5c2f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
@@ -37,7 +37,7 @@
 import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -199,7 +199,7 @@ public void testNotifyRetries() throws InterruptedException {
   @Test
   public void testNotificationOnLastRetryNormalShutdown() throws Exception {
-    HttpServer server = startHttpServer();
+    HttpServer2 server = startHttpServer();
     // Act like it is the second attempt. Default max attempts is 2
     MRApp app = spy(new MRAppWithCustomContainerAllocator(
         2, 2, true, this.getClass().getName(), true, 2, true));
@@ -223,7 +223,7 @@ public void testNotificationOnLastRetryNormalShutdown() throws Exception {
   @Test
   public void testAbsentNotificationOnNotLastRetryUnregistrationFailure()
       throws Exception {
-    HttpServer server = startHttpServer();
+    HttpServer2 server = startHttpServer();
     MRApp app = spy(new MRAppWithCustomContainerAllocator(2, 2, false,
         this.getClass().getName(), true, 1, false));
     doNothing().when(app).sysexit();
@@ -250,7 +250,7 @@ public void testAbsentNotificationOnNotLastRetryUnregistrationFailure()
   @Test
   public void testNotificationOnLastRetryUnregistrationFailure()
       throws Exception {
-    HttpServer server = startHttpServer();
+    HttpServer2 server = startHttpServer();
     MRApp app = spy(new MRAppWithCustomContainerAllocator(2, 2, false,
         this.getClass().getName(), true, 2, false));
     doNothing().when(app).sysexit();
@@ -274,10 +274,10 @@ public void testNotificationOnLastRetryUnregistrationFailure()
     server.stop();
   }
-  private static HttpServer startHttpServer() throws Exception {
+  private static HttpServer2 startHttpServer() throws Exception {
     new File(System.getProperty(
         "build.webapps", "build/webapps") + "/test").mkdirs();
-    HttpServer server = new HttpServer.Builder().setName("test")
+    HttpServer2 server = new HttpServer2.Builder().setName("test")
         .addEndpoint(URI.create("http://localhost:0"))
         .setFindPort(true).build();
     server.addServlet("jobend", "/jobend", JobEndServlet.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
index 16b0e10e833..7d3e2edc448 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
@@ -34,10 +34,10 @@
 import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 public class TestJobEndNotifier extends TestCase {
-  HttpServer server;
+  HttpServer2 server;
   URL baseUrl;
   @SuppressWarnings("serial")
@@ -102,7 +102,7 @@ public void doGet(HttpServletRequest request,
   public void setUp() throws Exception {
     new File(System.getProperty("build.webapps", "build/webapps") + "/test"
         ).mkdirs();
-    server = new HttpServer.Builder().setName("test")
+    server = new HttpServer2.Builder().setName("test")
         .addEndpoint(URI.create("http://localhost:0"))
         .setFindPort(true).build();
     server.addServlet("delay", "/delay", DelayServlet.class);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
index 90323ee089d..c5eda8a29e5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -59,7 +59,7 @@ public enum HTTP { GET, POST, HEAD, PUT, DELETE };
   private volatile String redirectPath;
   private volatile String wsName;
   private volatile Configuration conf;
-  private volatile HttpServer httpServer;
+  private volatile HttpServer2 httpServer;
   private volatile GuiceFilter guiceFilter;
   private final Router router = new Router();
@@ -72,11 +72,11 @@ public enum HTTP { GET, POST, HEAD, PUT, DELETE };
   static final Splitter pathSplitter =
       Splitter.on('/').trimResults().omitEmptyStrings();
-  void setHttpServer(HttpServer server) {
+  void setHttpServer(HttpServer2 server) {
     httpServer = checkNotNull(server, "http server");
   }
-  @Provides public HttpServer httpServer() { return httpServer; }
+  @Provides public HttpServer2 httpServer() { return httpServer; }
   /**
    * Get the address the http server is bound to
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
index e0a37ea6dac..62f1e45c541 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
@@ -35,7 +35,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.security.AdminACLsManager;
 import org.slf4j.Logger;
@@ -216,7 +216,7 @@ public void setup() {
           System.exit(1);
         }
       }
-      HttpServer.Builder builder = new HttpServer.Builder().setName(name)
+      HttpServer2.Builder builder = new HttpServer2.Builder().setName(name)
          .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
          .setConf(conf).setFindPort(findPort)
          .setACL(new AdminACLsManager(conf).getAdminAcl())
@@ -229,7 +229,7 @@ public void setup() {
             .setKeytabConfKey(spnegoKeytabKey)
             .setSecurityEnabled(UserGroupInformation.isSecurityEnabled());
       }
-      HttpServer server = builder.build();
+      HttpServer2 server = builder.build();
       for(ServletStruct struct: servlets) {
         server.addServlet(struct.name, struct.spec, struct.clazz);
@@ -237,7 +237,7 @@
       for(Map.Entry<String, Object> entry : attributes.entrySet()) {
         server.setAttribute(entry.getKey(), entry.getValue());
       }
-      HttpServer.defineFilter(server.getWebAppContext(), "guice",
+      HttpServer2.defineFilter(server.getWebAppContext(), "guice",
         GuiceFilter.class.getName(), null, new String[] { "/*" });
       webapp.setConf(conf);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
index fae279a238b..cd18e4733e5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
@@ -24,7 +24,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.util.StringUtils;
@@ -41,7 +41,7 @@ public class WebAppProxy extends AbstractService {
   public static final String PROXY_HOST_ATTRIBUTE = "proxyHost";
   private static final Log LOG = LogFactory.getLog(WebAppProxy.class);
-  private HttpServer proxyServer = null;
+  private HttpServer2 proxyServer = null;
   private String bindAddress = null;
   private int port = 0;
   private AccessControlList acl = null;
@@ -90,7 +90,7 @@ protected void serviceInit(Configuration conf) throws Exception {
   @Override
   protected void serviceStart() throws Exception {
     try {
-      proxyServer = new HttpServer.Builder().setName("proxy")
+      proxyServer = new HttpServer2.Builder().setName("proxy")
          .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
          .setFindPort(port == 0)
          .setConf(getConfig()).setACL(acl).build();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
index 58a7ff00238..f39ab3ecd10 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
@@ -43,7 +43,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.StringUtils;
@@ -274,7 +274,7 @@ public synchronized void init(Configuration conf) {
   private class WebAppProxyForTest extends WebAppProxy {
-    HttpServer proxyServer;
+    HttpServer2 proxyServer;
     AppReportFetcherForTest appReportFetcher;
     @Override
@@ -286,7 +286,7 @@ public void start() {
       AccessControlList acl = new AccessControlList(
           conf.get(YarnConfiguration.YARN_ADMIN_ACL,
               YarnConfiguration.DEFAULT_YARN_ADMIN_ACL));
-      proxyServer = new HttpServer.Builder()
+      proxyServer = new HttpServer2.Builder()
          .setName("proxy")
          .addEndpoint(URI.create("http://" + bindAddress + ":0"))
          .setFindPort(true)
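
Every hunk in this patch is the same mechanical substitution: the type HttpServer becomes
HttpServer2 while the surrounding builder chain is untouched. As an illustration only (this
sketch is not part of the patch; it restates the builder calls from the TestJobEndNotifier
hunks above, and getConnectorAddress(0) is assumed from the HttpServer2 API rather than
shown in any hunk), a minimal standalone caller of the renamed class looks like this:

    import java.io.File;
    import java.net.URI;

    import org.apache.hadoop.http.HttpServer2;

    public class HttpServer2Usage {
      public static void main(String[] args) throws Exception {
        // The builder resolves its webapp under build.webapps, so create the
        // "test" webapp directory first, exactly as the tests above do.
        new File(System.getProperty(
            "build.webapps", "build/webapps") + "/test").mkdirs();

        // Identical fluent chain to the old HttpServer.Builder; only the class
        // name changes. Port 0 plus setFindPort(true) binds any free port.
        HttpServer2 server = new HttpServer2.Builder().setName("test")
            .addEndpoint(URI.create("http://localhost:0"))
            .setFindPort(true).build();
        server.start();
        try {
          // Assumed API: getConnectorAddress(0) reports the bound address.
          System.out.println("listening on " + server.getConnectorAddress(0));
        } finally {
          server.stop();
        }
      }
    }

Because the fluent API is unchanged, callers migrate with a textual rename; that is why
branch-2 can keep the old HttpServer alongside HttpServer2 for compatibility.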