HBASE-11980 Change sync to hsync, remove unused InfoServer, and reference our httpserver instead of hadoops

stack 2014-09-29 22:28:12 -07:00
parent 6189b52fb0
commit aa432fd68f
5 changed files with 159 additions and 153 deletions


@@ -658,7 +658,7 @@
     </property>
   </activation>
   <properties>
-    <hadoop.version>3.0-SNAPSHOT</hadoop.version>
+    <hadoop.version>${hadoop-three.version}</hadoop.version>
   </properties>
   <dependencies>
     <dependency>
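
Note: the hardcoded 3.0-SNAPSHOT removed here now resolves through the hadoop-three.version property that the root pom.xml change further down introduces.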


@@ -37,8 +37,9 @@ import org.apache.hadoop.conf.Configuration;
  */
 @InterfaceAudience.Private
 public class InfoServer {
+  private static final String HBASE_APP_DIR = "hbase-webapps";
-  private final HttpServer httpServer;
+  private final org.apache.hadoop.hbase.http.HttpServer httpServer;
   /**
    * Create a status server on the given port.
@@ -53,7 +54,8 @@ public class InfoServer {
   public InfoServer(String name, String bindAddress, int port, boolean findPort,
       final Configuration c)
   throws IOException {
-    HttpServer.Builder builder = new HttpServer.Builder();
+    HttpServer.Builder builder =
+      new org.apache.hadoop.hbase.http.HttpServer.Builder();
     builder
       .setName(name)
       .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
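
For orientation, a hedged sketch of how a builder like the one above is typically completed and started. Only setName and addEndpoint appear in this diff; setFindPort, setConf, build, and start are assumptions carried over from Hadoop's HttpServer2, which HBase's http package was forked from, and the port is illustrative:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpServer;

public class InfoServerSketch {
  public static void main(String[] args) throws Exception {
    // Build and start a status server. Method names beyond
    // setName/addEndpoint are assumed from the HttpServer2 lineage.
    HttpServer server = new HttpServer.Builder()
        .setName("master")
        .addEndpoint(URI.create("http://0.0.0.0:60010")) // illustrative port
        .setFindPort(true)  // probe upward until a free port is found
        .setConf(new Configuration())
        .build();
    server.start();
  }
}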


@@ -163,8 +163,8 @@ public class ProtobufLogWriter extends WriterBase {
   @Override
   public void sync() throws IOException {
     try {
-      this.output.flush();
-      this.output.sync();
+      this.output.hflush();
+      this.output.hsync();
     } catch (NullPointerException npe) {
       // Concurrent close...
       throw new IOException(npe);
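
Context for the change above: FSDataOutputStream.sync() was deprecated in favor of the two Syncable calls now used. A minimal sketch of the difference, assuming a plain HDFS client; the class name, path, and payload are illustrative only:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SyncableSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    try (FSDataOutputStream out = fs.create(new Path("/tmp/demo-wal"))) {
      out.write("edit-1".getBytes(StandardCharsets.UTF_8));
      // hflush(): push buffered bytes to the datanode pipeline so new
      // readers can see them; datanodes may still hold them in memory only.
      out.hflush();
      // hsync(): like hflush(), but also asks the datanodes to fsync,
      // so the bytes survive a power loss, which is what a WAL writer wants.
      out.hsync();
    }
  }
}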


@@ -1,132 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URL;
-import java.util.Map;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.HttpServer;
-import org.mortbay.jetty.handler.ContextHandlerCollection;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.DefaultServlet;
-
-/**
- * Create a Jetty embedded server to answer http requests. The primary goal
- * is to serve up status information for the server.
- * There are three contexts:
- *   "/stacks/" -> points to stack trace
- *   "/static/" -> points to common static files (src/hbase-webapps/static)
- *   "/" -> the jsp server code from (src/hbase-webapps/<name>)
- */
-@InterfaceAudience.Private
-public class InfoServer extends HttpServer {
-  private final Configuration config;
-
-  /**
-   * Create a status server on the given port.
-   * The jsp scripts are taken from src/hbase-webapps/<code>name<code>.
-   * @param name The name of the server
-   * @param bindAddress address to bind to
-   * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and
-   * increment by 1 until it finds a free port.
-   * @throws IOException e
-   */
-  public InfoServer(String name, String bindAddress, int port, boolean findPort,
-      final Configuration c)
-  throws IOException {
-    super(name, bindAddress, port, findPort, c);
-    this.config = c;
-    fixupLogsServletLocation();
-  }
-
-  /**
-   * Fixup where the logs app points, make it point at hbase logs rather than
-   * hadoop logs.
-   */
-  private void fixupLogsServletLocation() {
-    // Must be same as up in hadoop.
-    final String logsContextPath = "/logs";
-    // Now, put my logs in place of hadoops... disable old one first.
-    Context oldLogsContext = null;
-    for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
-      if (e.getKey().getContextPath().equals(logsContextPath)) {
-        oldLogsContext = e.getKey();
-        break;
-      }
-    }
-    if (oldLogsContext != null) {
-      this.defaultContexts.put(oldLogsContext, Boolean.FALSE);
-    }
-    // Now do my logs.
-    // Set up the context for "/logs/" if "hbase.log.dir" property is defined.
-    String logDir = System.getProperty("hbase.log.dir");
-    if (logDir != null) {
-      // This is a little presumptious but seems to work.
-      Context logContext =
-        new Context((ContextHandlerCollection)this.webServer.getHandler(),
-          logsContextPath);
-      logContext.setResourceBase(logDir);
-      logContext.addServlet(DefaultServlet.class, "/");
-      HttpServerUtil.constrainHttpMethods(logContext);
-      defaultContexts.put(logContext, true);
-    }
-  }
-
-  /**
-   * Get the pathname to the webapps files.
-   * @param appName eg "secondary" or "datanode"
-   * @return the pathname as a URL
-   * @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH.
-   */
-  protected String getWebAppsPath(String appName) throws FileNotFoundException {
-    // Copied from the super-class.
-    String resourceName = "hbase-webapps/" + appName;
-    URL url = getClass().getClassLoader().getResource(resourceName);
-    if (url == null)
-      throw new FileNotFoundException(resourceName + " not found in CLASSPATH");
-    String urlString = url.toString();
-    return urlString.substring(0, urlString.lastIndexOf('/'));
-  }
-
-  /**
-   * Get the pathname to the <code>path</code> files.
-   * @return the pathname as a URL
-   */
-  protected String getWebAppsPath() throws IOException {
-    // Hack: webapps is not a unique enough element to find in CLASSPATH
-    // We'll more than likely find the hadoop webapps dir. So, instead
-    // look for the 'master' webapp in the webapps subdir. That should
-    // get us the hbase context. Presumption is that place where the
-    // master webapp resides is where we want this InfoServer picking up
-    // web applications.
-    final String master = "master";
-    String p = getWebAppsPath(master);
-    // Now strip master off the end if it is present
-    if(p.endsWith(master)) {
-      return p.substring(0, p.lastIndexOf(master));
-    }
-    return p;
-  }
-}
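
Note: the class deleted above subclassed Hadoop's org.apache.hadoop.http.HttpServer and reached into its defaultContexts map to repoint the /logs servlet at HBase's own log directory. The replacement InfoServer shown earlier wraps HBase's org.apache.hadoop.hbase.http.HttpServer by composition instead, which is why this subclass and its fixups could be dropped.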

pom.xml

@@ -865,6 +865,7 @@
     <compileSource>1.7</compileSource>
     <!-- Dependencies -->
     <hadoop-two.version>2.4.0</hadoop-two.version>
+    <hadoop-three.version>3.0.0-SNAPSHOT</hadoop-three.version>
     <commons-cli.version>1.2</commons-cli.version>
     <commons-codec.version>1.7</commons-codec.version>
     <!-- pretty outdated -->
@@ -1640,24 +1641,159 @@
          <value>3.0</value>
        </property>
      </activation>
      <modules>
        <!--For now, use hadoop2 compat module-->
        <module>hbase-hadoop2-compat</module>
      </modules>
      <properties>
        <hadoop.version>3.0.0-SNAPSHOT</hadoop.version>
        <hadoop.version>${hadoop-three.version}</hadoop.version>
        <!--Use this compat module for now. TODO: Make h3 one if we need one-->
        <compat.module>hbase-hadoop2-compat</compat.module>
        <assembly.file>src/main/assembly/hadoop-three-compat.xml</assembly.file>
      </properties>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <version>${hadoop.version}</version>
        </dependency>
        <!-- test deps for hadoop-0.24 profile -->
        <!-- Says test, but scoped for compile. Which is correct? -->
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-minicluster</artifactId>
          <version>${hadoop.version}</version>
          <scope>compile</scope>
        </dependency>
      </dependencies>
      <dependencyManagement>
        <dependencies>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
              <exclusion>
                <groupId>io.netty</groupId>
                <artifactId>netty</artifactId>
              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
              <exclusion>
                <groupId>io.netty</groupId>
                <artifactId>netty</artifactId>
              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>${hadoop-three.version}</version>
            <type>test-jar</type>
            <scope>test</scope>
            <exclusions>
              <exclusion>
                <groupId>io.netty</groupId>
                <artifactId>netty</artifactId>
              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <exclusions>
              <exclusion>
                <groupId>javax.servlet.jsp</groupId>
                <artifactId>jsp-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>servlet-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>stax</groupId>
                <artifactId>stax-api</artifactId>
              </exclusion>
            </exclusions>
            <version>${hadoop-three.version}</version>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop-three.version}</version>
            <type>test-jar</type>
            <scope>test</scope>
            <exclusions>
              <exclusion>
                <groupId>javax.servlet.jsp</groupId>
                <artifactId>jsp-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>servlet-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>stax</groupId>
                <artifactId>stax-api</artifactId>
              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-auth</artifactId>
            <version>${hadoop-three.version}</version>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
              <exclusion>
                <groupId>javax.servlet.jsp</groupId>
                <artifactId>jsp-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>servlet-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>stax</groupId>
                <artifactId>stax-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>io.netty</groupId>
                <artifactId>netty</artifactId>
              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop-three.version}</version>
          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-annotations</artifactId>
            <version>${hadoop-three.version}</version>
          </dependency>
          <!-- This was marked as test dep in earlier pom, but was scoped compile.
            Where do we actually need it? -->
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-minicluster</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
              <exclusion>
                <groupId>javax.servlet.jsp</groupId>
                <artifactId>jsp-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>servlet-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>stax</groupId>
                <artifactId>stax-api</artifactId>
              </exclusion>
              <exclusion>
                <groupId>io.netty</groupId>
                <artifactId>netty</artifactId>
              </exclusion>
            </exclusions>
          </dependency>
        </dependencies>
      </dependencyManagement>
    </profile>
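
A hedged usage note: given the activation block above, this profile engages when a build passes -Dhadoop.profile=3.0 (for example, mvn clean install -Dhadoop.profile=3.0), and the Hadoop snapshot in play can be pinned by overriding -Dhadoop-three.version on the command line. The recurring servlet-api/jsp-api/stax-api and netty exclusions keep Hadoop's transitively bundled copies of those jars from clashing with the versions HBase ships itself.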
<!-- profiles for the tests
See as well the properties of the project for the values