From ddc6a42f0ff6778129afa49bcd7d2559e35084bd Mon Sep 17 00:00:00 2001
From: stack
Date: Mon, 29 Sep 2014 22:28:12 -0700
Subject: [PATCH] HBASE-11980 Change sync to hsync, remove unused InfoServer,
 and reference our httpserver instead of hadoops

---
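Reviewer note (kept below the --- cut so it stays out of the commit message):
the WAL writer previously called FSDataOutputStream#flush()/#sync(). The
deprecated sync() only promised hflush semantics: bytes are pushed to the
datanodes and become visible to new readers, but are not necessarily on disk.
Calling hflush() and then hsync() makes the durability intent explicit. Below
is a minimal standalone sketch of the difference; it is not code from this
patch, and it assumes an HDFS-backed FileSystem and a throwaway path.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SyncableSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Hypothetical scratch file; any writable path will do.
        try (FSDataOutputStream out = fs.create(new Path("/tmp/wal-sketch"))) {
          out.write("edit".getBytes("UTF-8"));
          // hflush: flush client buffers out to the datanodes so new
          // readers see the bytes; replicas may still sit in OS cache.
          out.hflush();
          // hsync: hflush plus an fsync on each datanode, so the bytes
          // survive a machine-wide power loss.
          out.hsync();
        }
      }
    }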
 hbase-server/pom.xml                            |   2 +-
 .../apache/hadoop/hbase/http/InfoServer.java    |   6 +-
 .../regionserver/wal/ProtobufLogWriter.java     |   4 +-
 .../apache/hadoop/hbase/util/InfoServer.java    | 132 --------------
 pom.xml                                         | 168 ++++++++++++++++--
 5 files changed, 159 insertions(+), 153 deletions(-)
 delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/util/InfoServer.java

diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 7dd8471a414..3b254661745 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -658,7 +658,7 @@
       </activation>
       <properties>
-        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
+        <hadoop.version>${hadoop-three.version}</hadoop.version>
       </properties>
       <dependencies>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
index 4fe2660a437..ffaaeaa279d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
@@ -37,8 +37,9 @@ import org.apache.hadoop.conf.Configuration;
  */
 @InterfaceAudience.Private
 public class InfoServer {
+  private static final String HBASE_APP_DIR = "hbase-webapps";
 
-  private final HttpServer httpServer;
+  private final org.apache.hadoop.hbase.http.HttpServer httpServer;
 
   /**
    * Create a status server on the given port.
@@ -53,7 +54,8 @@ public class InfoServer {
   public InfoServer(String name, String bindAddress, int port, boolean findPort,
       final Configuration c)
   throws IOException {
-    HttpServer.Builder builder = new HttpServer.Builder();
+    HttpServer.Builder builder =
+      new org.apache.hadoop.hbase.http.HttpServer.Builder();
     builder
       .setName(name)
       .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index 1ab97595564..14ffa4547e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -163,8 +163,8 @@ public class ProtobufLogWriter extends WriterBase {
   @Override
   public void sync() throws IOException {
     try {
-      this.output.flush();
-      this.output.sync();
+      this.output.hflush();
+      this.output.hsync();
     } catch (NullPointerException npe) {
       // Concurrent close...
       throw new IOException(npe);
     }
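Note: with the server code copied under org.apache.hadoop.hbase.http, the new
InfoServer (above) wraps our own HttpServer through its Builder instead of
subclassing Hadoop's, and the old subclassing InfoServer is deleted below. A
rough standalone sketch of the builder usage follows; it is illustrative only,
it assumes the copied Builder keeps the setFindPort/setConf/build methods of
the Hadoop HttpServer2 it derives from, and the name and port values are made
up.

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.http.HttpServer;

    public class InfoServerSketch {
      public static void main(String[] args) throws Exception {
        HttpServer server = new HttpServer.Builder()
            .setName("master")                               // serves the hbase-webapps/master webapp
            .addEndpoint(URI.create("http://0.0.0.0:16010")) // bind address and port
            .setFindPort(true)                               // probe upwards if the port is taken
            .setConf(new Configuration())
            .build();
        server.start();
      }
    }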
- String resourceName = "hbase-webapps/" + appName; - URL url = getClass().getClassLoader().getResource(resourceName); - if (url == null) - throw new FileNotFoundException(resourceName + " not found in CLASSPATH"); - String urlString = url.toString(); - return urlString.substring(0, urlString.lastIndexOf('/')); - } - - /** - * Get the pathname to the path files. - * @return the pathname as a URL - */ - protected String getWebAppsPath() throws IOException { - // Hack: webapps is not a unique enough element to find in CLASSPATH - // We'll more than likely find the hadoop webapps dir. So, instead - // look for the 'master' webapp in the webapps subdir. That should - // get us the hbase context. Presumption is that place where the - // master webapp resides is where we want this InfoServer picking up - // web applications. - final String master = "master"; - String p = getWebAppsPath(master); - // Now strip master off the end if it is present - if(p.endsWith(master)) { - return p.substring(0, p.lastIndexOf(master)); - } - return p; - } -} diff --git a/pom.xml b/pom.xml index e59b82f67fd..e54ba439d74 100644 --- a/pom.xml +++ b/pom.xml @@ -903,6 +903,7 @@ 1.7 2.4.0 + 3.0.0-SNAPSHOT 1.2 1.7 @@ -1681,24 +1682,159 @@ 3.0 + + + hbase-hadoop2-compat + - 3.0.0-SNAPSHOT + ${hadoop-three.version} + + hbase-hadoop2-compat + src/main/assembly/hadoop-three-compat.xml - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - - - org.apache.hadoop - hadoop-minicluster - ${hadoop.version} - compile - - + + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop-three.version} + + + io.netty + netty + + + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop-three.version} + + + io.netty + netty + + + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop-three.version} + test-jar + test + + + io.netty + netty + + + + + org.apache.hadoop + hadoop-hdfs + + + javax.servlet.jsp + jsp-api + + + javax.servlet + servlet-api + + + stax + stax-api + + + ${hadoop-three.version} + + + org.apache.hadoop + hadoop-hdfs + ${hadoop-three.version} + test-jar + test + + + javax.servlet.jsp + jsp-api + + + javax.servlet + servlet-api + + + stax + stax-api + + + + + org.apache.hadoop + hadoop-auth + ${hadoop-three.version} + + + org.apache.hadoop + hadoop-common + ${hadoop-three.version} + + + javax.servlet.jsp + jsp-api + + + javax.servlet + servlet-api + + + stax + stax-api + + + io.netty + netty + + + + + org.apache.hadoop + hadoop-client + ${hadoop-three.version} + + + org.apache.hadoop + hadoop-annotations + ${hadoop-three.version} + + + + org.apache.hadoop + hadoop-minicluster + ${hadoop-three.version} + + + javax.servlet.jsp + jsp-api + + + javax.servlet + servlet-api + + + stax + stax-api + + + io.netty + netty + + + + + +