HBASE-19053 Split out o.a.h.h.http from hbase-server into a separate module

Change-Id: Ie3a688b789104df7feaf34ac9fb326a79d6a3960
This commit is contained in:
Apekshit Sharma 2017-10-21 18:12:07 -07:00
parent 94748a3c93
commit d6982414c1
54 changed files with 901 additions and 244 deletions

View File

@ -263,6 +263,10 @@
<groupId>com.github.stephenc.findbugs</groupId> <groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId> <artifactId>findbugs-annotations</artifactId>
</dependency> </dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.mockito</groupId> <groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId> <artifactId>mockito-all</artifactId>

View File

@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory; import java.lang.management.ManagementFactory;
import java.lang.reflect.Array; import java.lang.reflect.Array;
import java.util.Iterator; import java.util.Iterator;
@ -42,8 +41,8 @@ import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData; import javax.management.openmbean.TabularData;
import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonGenerationException;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@ -99,12 +98,7 @@ public class JSONBean {
} }
/** /**
* @param mBeanServer
* @param qry
* @param attribute
* @param description
* @return Return non-zero if failed to find bean. 0 * @return Return non-zero if failed to find bean. 0
* @throws IOException
*/ */
private static int write(final JsonGenerator jg, private static int write(final JsonGenerator jg,
final MBeanServer mBeanServer, ObjectName qry, String attribute, final MBeanServer mBeanServer, ObjectName qry, String attribute,
@ -340,31 +334,6 @@ public class JSONBean {
} }
} }
/**
* Dump out a subset of regionserver mbeans only, not all of them, as json on System.out.
* @throws MalformedObjectNameException
* @throws IOException
*/
public static String dumpRegionServerMetrics() throws MalformedObjectNameException, IOException {
StringWriter sw = new StringWriter(1024 * 100); // Guess this size
try (PrintWriter writer = new PrintWriter(sw)) {
JSONBean dumper = new JSONBean();
try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
jsonBeanWriter.write(mbeanServer,
new ObjectName("java.lang:type=Memory"), null, false);
jsonBeanWriter.write(mbeanServer,
new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=IPC"), null, false);
jsonBeanWriter.write(mbeanServer,
new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Replication"), null, false);
jsonBeanWriter.write(mbeanServer,
new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Server"), null, false);
}
}
sw.close();
return sw.toString();
}
/** /**
* Dump out all registered mbeans as json on System.out. * Dump out all registered mbeans as json on System.out.
* @throws IOException * @throws IOException
@ -379,9 +348,4 @@ public class JSONBean {
} }
} }
} }
public static void main(String[] args) throws IOException, MalformedObjectNameException {
String str = dumpRegionServerMetrics();
System.out.println(str);
}
} }

View File

@ -91,7 +91,7 @@ public final class JSONMetricUtil {
/** /**
* Returns a subset of mbeans defined by qry. * Returns a subset of mbeans defined by qry.
* Modeled after {@link JSONBean#dumpRegionServerMetrics()} * Modeled after DumpRegionServerMetrics#dumpMetrics.
* Example: String qry= "java.lang:type=Memory" * Example: String qry= "java.lang:type=Memory"
* @throws MalformedObjectNameException if json have bad format * @throws MalformedObjectNameException if json have bad format
* @throws IOException / * @throws IOException /

View File

@ -174,6 +174,12 @@
<type>test-jar</type> <type>test-jar</type>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-http</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<!-- The coprocessor.Export needs mapreduce.Import and mapreduce.Export to run the unit tests --> <!-- The coprocessor.Export needs mapreduce.Import and mapreduce.Export to run the unit tests -->
<!-- see org.apache.hadoop.hbase.coprocessor.TestImportExport --> <!-- see org.apache.hadoop.hbase.coprocessor.TestImportExport -->
<dependency> <dependency>

515
hbase-http/pom.xml Normal file
View File

@ -0,0 +1,515 @@
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase-build-configuration</artifactId>
<groupId>org.apache.hbase</groupId>
<version>3.0.0-SNAPSHOT</version>
<relativePath>../hbase-build-configuration</relativePath>
</parent>
<artifactId>hbase-http</artifactId>
<name>Apache HBase - HTTP</name>
<description>HTTP functionality for HBase Servers</description>
<build>
<!-- Makes sure the resources get added before they are processed
by placing this first -->
<testResources>
<!-- Our test artifact has different license info than our source/bin ones -->
<testResource>
<directory>src/test/resources/META-INF/</directory>
<targetPath>META-INF/</targetPath>
<includes>
<include>NOTICE</include>
</includes>
<filtering>true</filtering>
</testResource>
<testResource>
<directory>src/test/resources</directory>
<includes>
<include>**/**</include>
</includes>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<!-- licensing info from our bundled works -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-remote-resources-plugin</artifactId>
<version>1.5</version>
<executions>
<execution>
<id>default</id>
<configuration>
<attachToTest>false</attachToTest>
<properties>
<copyright-end-year>${build.year}</copyright-end-year>
<debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
<bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
<bundled-jquery>${license.bundles.jquery}</bundled-jquery>
<bundled-logo>${license.bundles.logo}</bundled-logo>
<bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
</properties>
<resourceBundles>
<resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
</resourceBundles>
<supplementalModelArtifacts>
<supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
</supplementalModelArtifacts>
<supplementalModels>
<supplementalModel>supplemental-models.xml</supplementalModel>
</supplementalModels>
</configuration>
</execution>
</executions>
</plugin>
<!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running
tests (this is needed for upstream projects whose tests need this jar simply for
compilation) -->
<plugin>
<!--Make it so assembly:single does nothing in here-->
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<skipAssembly>true</skipAssembly>
</configuration>
</plugin>
<!-- Make a jar and put the sources in the jar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Run findbugs -->
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
</plugin>
<!-- Testing plugins -->
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<properties>
<property>
<name>listener</name>
<value>org.apache.hadoop.hbase.ResourceCheckerJUnitListener</value>
</property>
</properties>
<systemPropertyVariables>
<test.build.webapps>target/test-classes/webapps</test.build.webapps>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
<!-- General Resources -->
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself and needs to
be kept in plugin management, not in the actual plugins. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<versionRange>[1.6,)</versionRange>
<goals>
<goal>run</goal>
</goals>
</pluginExecutionFilter>
<action>
<execute>
<runOnIncremental>false</runOnIncremental>
<runOnConfiguration>true</runOnConfiguration>
</execute>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<versionRange>[2.8,)</versionRange>
<goals>
<goal>build-classpath</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<versionRange>[3.2,)</versionRange>
<goals>
<goal>compile</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency>
<!-- Intra-project dependencies -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-resource-bundle</artifactId>
<version>${project.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util-ajax</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
</dependency>
<!-- General dependencies -->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-simplekdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk16</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<!-- Needs to make the profile in apache parent pom -->
<profile>
<id>apache-release</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>license-javadocs</id>
<phase>prepare-package</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/apidocs</outputDirectory>
<resources>
<resource>
<directory>src/main/javadoc/META-INF/</directory>
<targetPath>META-INF/</targetPath>
<includes>
<include>LICENSE</include>
<include>NOTICE</include>
</includes>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Skip the tests in this module -->
<profile>
<id>skipServerTests</id>
<activation>
<property>
<name>skipServerTests</name>
</property>
</activation>
<properties>
<surefire.skipFirstPart>true</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
</properties>
</profile>
<!-- Special builds -->
<profile>
<id>native</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native"/>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
<arg line="${basedir}/src/main/native -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Profiles for building against different hadoop versions -->
<!-- There are a lot of common dependencies used here, should investigate
if we can combine these profiles somehow -->
<!-- profile for building against Hadoop 2.x. This is the default. -->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h2--><name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<!-- needed to run the unit test for DS to generate
the required classpath that is required in the env
of the launch container in the mini mr/yarn cluster
-->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!--
profile for building against Hadoop 3.0.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<properties>
<hadoop.version>${hadoop-three.version}</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<!-- needed to run the unit test for DS to generate
the required classpath that is required in the env
of the launch container in the mini mr/yarn cluster
-->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.http;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
/** /**
* Initialize a javax.servlet.Filter. * Initialize a javax.servlet.Filter.
*/ */
public abstract class FilterInitializer { public abstract class FilterInitializer {
/** /**

View File

@ -92,7 +92,7 @@ public class HtmlQuoting {
} }
} }
} }
/** /**
* Quote the given item to make it html-safe. * Quote the given item to make it html-safe.
* @param item the string to quote * @param item the string to quote
@ -130,18 +130,18 @@ public class HtmlQuoting {
public void write(byte[] data, int off, int len) throws IOException { public void write(byte[] data, int off, int len) throws IOException {
quoteHtmlChars(out, data, off, len); quoteHtmlChars(out, data, off, len);
} }
@Override @Override
public void write(int b) throws IOException { public void write(int b) throws IOException {
data[0] = (byte) b; data[0] = (byte) b;
quoteHtmlChars(out, data, 0, 1); quoteHtmlChars(out, data, 0, 1);
} }
@Override @Override
public void flush() throws IOException { public void flush() throws IOException {
out.flush(); out.flush();
} }
@Override @Override
public void close() throws IOException { public void close() throws IOException {
out.close(); out.close();
@ -173,7 +173,7 @@ public class HtmlQuoting {
next += 5; next += 5;
} else if (item.startsWith("&apos;", next)) { } else if (item.startsWith("&apos;", next)) {
buffer.append('\''); buffer.append('\'');
next += 6; next += 6;
} else if (item.startsWith("&gt;", next)) { } else if (item.startsWith("&gt;", next)) {
buffer.append('>'); buffer.append('>');
next += 4; next += 4;
@ -188,7 +188,7 @@ public class HtmlQuoting {
if (end == 0) { if (end == 0) {
end = len; end = len;
} }
throw new IllegalArgumentException("Bad HTML quoting for " + throw new IllegalArgumentException("Bad HTML quoting for " +
item.substring(next,end)); item.substring(next,end));
} }
posn = next; posn = next;
@ -197,7 +197,7 @@ public class HtmlQuoting {
buffer.append(item.substring(posn, len)); buffer.append(item.substring(posn, len));
return buffer.toString(); return buffer.toString();
} }
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
if (args.length == 0) { if (args.length == 0) {
throw new IllegalArgumentException("Please provide some arguments"); throw new IllegalArgumentException("Please provide some arguments");

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.http;
import org.eclipse.jetty.security.ConstraintSecurityHandler; import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.util.security.Constraint; import org.eclipse.jetty.util.security.Constraint;

View File

@ -38,7 +38,7 @@ import org.apache.hadoop.conf.Configuration;
*/ */
@InterfaceAudience.Private @InterfaceAudience.Private
public class InfoServer { public class InfoServer {
private static final String HBASE_APP_DIR = "hbase-webapps"; private static final String HBASE_APP_DIR = "hbase-webapps";
private final org.apache.hadoop.hbase.http.HttpServer httpServer; private final org.apache.hadoop.hbase.http.HttpServer httpServer;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.http;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability; import org.apache.yetus.audience.InterfaceStability;
/** /**
* This interface contains constants for configuration keys used * This interface contains constants for configuration keys used
* in the hbase http server code. * in the hbase http server code.
*/ */

View File

@ -5,9 +5,9 @@
* The ASF licenses this file to You under the Apache License, Version 2.0 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with * (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at * the License. You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -57,23 +57,23 @@ import org.apache.hadoop.hbase.util.JSONBean;
* all hadoop metrics exposed through JMX. * all hadoop metrics exposed through JMX.
* </p> * </p>
* <p> * <p>
* The optional <code>get</code> parameter is used to query a specific * attribute of a JMX bean. The format of the URL is
* attribute of a JMX bean. The format of the URL is * attribute of a JMX bean. The format of the URL is
* <code>http://.../jmx?get=MXBeanName::AttributeName</code> * <code>http://.../jmx?get=MXBeanName::AttributeName</code>
* </p> * </p>
* <p> * <p>
* For example * For example
* <code> * <code>
* http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId * http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId
* </code> will return the cluster id of the namenode mxbean. * </code> will return the cluster id of the namenode mxbean.
* </p> * </p>
* <p> * <p>
* If the <code>qry</code> or the <code>get</code> parameter is not formatted * If the <code>qry</code> or the <code>get</code> parameter is not formatted
* correctly then a 400 BAD REQUEST http response code will be returned. * correctly then a 400 BAD REQUEST http response code will be returned.
* </p> * </p>
* <p> * <p>
* If a resource such as an mbean or attribute can not be found, * If a resource such as an mbean or attribute can not be found,
* a 404 SC_NOT_FOUND http response code will be returned. * a 404 SC_NOT_FOUND http response code will be returned.
* </p> * </p>
* <p> * <p>
* The return format is JSON and in the form * The return format is JSON and in the form
@ -91,24 +91,24 @@ import org.apache.hadoop.hbase.util.JSONBean;
* <p> * <p>
* The servlet attempts to convert the JMXBeans into JSON. Each * bean's attributes will be converted to a JSON object member.
* bean's attributes will be converted to a JSON object member. * bean's attributes will be converted to a JSON object member.
* *
* If the attribute is a boolean, a number, a string, or an array * If the attribute is a boolean, a number, a string, or an array
* it will be converted to the JSON equivalent. * it will be converted to the JSON equivalent.
* *
* If the value is a {@link CompositeData} then it will be converted * If the value is a {@link CompositeData} then it will be converted
* to a JSON object with the keys as the name of the JSON member and * to a JSON object with the keys as the name of the JSON member and
* the value is converted following these same rules. * the value is converted following these same rules.
* *
* If the value is a {@link TabularData} then it will be converted * If the value is a {@link TabularData} then it will be converted
* to an array of the {@link CompositeData} elements that it contains. * to an array of the {@link CompositeData} elements that it contains.
* *
* All other objects will be converted to a string and output as such. * All other objects will be converted to a string and output as such.
* *
* The bean's name and modelerType will be returned for all beans. * The bean's name and modelerType will be returned for all beans.
* *
* Optional parameter "callback" should be used to deliver JSONP response. * </p>
* </p> * </p>
* *
*/ */
public class JMXJsonServlet extends HttpServlet { public class JMXJsonServlet extends HttpServlet {
private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class); private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class);
@ -142,7 +142,7 @@ public class JMXJsonServlet extends HttpServlet {
/** /**
* Process a GET request for the specified resource. * Process a GET request for the specified resource.
* *
* @param request * @param request
* The servlet request we are processing * The servlet request we are processing
* @param response * @param response
@ -161,7 +161,7 @@ public class JMXJsonServlet extends HttpServlet {
jsonpcb = checkCallbackName(request.getParameter(CALLBACK_PARAM)); jsonpcb = checkCallbackName(request.getParameter(CALLBACK_PARAM));
writer = response.getWriter(); writer = response.getWriter();
beanWriter = this.jsonBeanWriter.open(writer); beanWriter = this.jsonBeanWriter.open(writer);
// "callback" parameter implies JSONP output // "callback" parameter implies JSONP output
if (jsonpcb != null) { if (jsonpcb != null) {
response.setContentType("application/javascript; charset=utf8"); response.setContentType("application/javascript; charset=utf8");

View File

@ -5,9 +5,9 @@
* The ASF licenses this file to You under the Apache License, Version 2.0 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with * (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at * the License. You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

View File

@ -77,7 +77,7 @@ public class StaticUserWebFilter extends FilterInitializer {
@Override @Override
public String toString() { public String toString() {
return name; return name;
} }
} }
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
@ -99,7 +99,7 @@ public class StaticUserWebFilter extends FilterInitializer {
if (httpRequest.getRemoteUser() != null) { if (httpRequest.getRemoteUser() != null) {
chain.doFilter(request, response); chain.doFilter(request, response);
} else { } else {
HttpServletRequestWrapper wrapper = HttpServletRequestWrapper wrapper =
new HttpServletRequestWrapper(httpRequest) { new HttpServletRequestWrapper(httpRequest) {
@Override @Override
public Principal getUserPrincipal() { public Principal getUserPrincipal() {
@ -119,18 +119,18 @@ public class StaticUserWebFilter extends FilterInitializer {
this.username = conf.getInitParameter(HBASE_HTTP_STATIC_USER); this.username = conf.getInitParameter(HBASE_HTTP_STATIC_USER);
this.user = new User(username); this.user = new User(username);
} }
} }
@Override @Override
public void initFilter(FilterContainer container, Configuration conf) { public void initFilter(FilterContainer container, Configuration conf) {
HashMap<String, String> options = new HashMap<>(); HashMap<String, String> options = new HashMap<>();
String username = getUsernameFromConf(conf); String username = getUsernameFromConf(conf);
options.put(HBASE_HTTP_STATIC_USER, username); options.put(HBASE_HTTP_STATIC_USER, username);
container.addFilter("static_user_filter", container.addFilter("static_user_filter",
StaticUserFilter.class.getName(), StaticUserFilter.class.getName(),
options); options);
} }
@ -142,7 +142,7 @@ public class StaticUserWebFilter extends FilterInitializer {
if (oldStyleUgi != null) { if (oldStyleUgi != null) {
// We can't use the normal configuration deprecation mechanism here // We can't use the normal configuration deprecation mechanism here
// since we need to split out the username from the configured UGI. // since we need to split out the username from the configured UGI.
LOG.warn(DEPRECATED_UGI_KEY + " should not be used. Instead, use " + LOG.warn(DEPRECATED_UGI_KEY + " should not be used. Instead, use " +
HBASE_HTTP_STATIC_USER + "."); HBASE_HTTP_STATIC_USER + ".");
String[] parts = oldStyleUgi.split(","); String[] parts = oldStyleUgi.split(",");
return parts[0]; return parts[0];

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.http; package org.apache.hadoop.hbase.http;
import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetUtils;
@ -41,7 +41,7 @@ public class HttpServerFunctionalTest extends Assert {
public static final String TEST_BUILD_WEBAPPS = "test.build.webapps"; public static final String TEST_BUILD_WEBAPPS = "test.build.webapps";
/** expected location of the test.build.webapps dir: {@value} */ /** expected location of the test.build.webapps dir: {@value} */
private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps"; private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps";
/** name of the test webapp: {@value} */ /** name of the test webapp: {@value} */
private static final String TEST = "test"; private static final String TEST = "test";
@ -89,7 +89,7 @@ public class HttpServerFunctionalTest extends Assert {
* @throws IOException if a problem occurs * @throws IOException if a problem occurs
* @throws AssertionError if a condition was not met * @throws AssertionError if a condition was not met
*/ */
public static HttpServer createTestServer(Configuration conf, public static HttpServer createTestServer(Configuration conf,
String[] pathSpecs) throws IOException { String[] pathSpecs) throws IOException {
prepareTestWebapp(); prepareTestWebapp();
return createServer(TEST, conf, pathSpecs); return createServer(TEST, conf, pathSpecs);
@ -167,7 +167,7 @@ public class HttpServerFunctionalTest extends Assert {
return new HttpServer.Builder().setName(webapp).addEndpoint( return new HttpServer.Builder().setName(webapp).addEndpoint(
URI.create("http://localhost:0")); URI.create("http://localhost:0"));
} }
/** /**
* Create an HttpServer instance for the given webapp * Create an HttpServer instance for the given webapp
* @param webapp the webapp to work with * @param webapp the webapp to work with

View File

@ -83,15 +83,15 @@ public class TestGlobalFilter extends HttpServerFunctionalTest {
} }
} }
} }
/** access a url, ignoring some IOException such as the page does not exist */ /** access a url, ignoring some IOException such as the page does not exist */
static void access(String urlstring) throws IOException { static void access(String urlstring) throws IOException {
LOG.warn("access " + urlstring); LOG.warn("access " + urlstring);
URL url = new URL(urlstring); URL url = new URL(urlstring);
URLConnection connection = url.openConnection(); URLConnection connection = url.openConnection();
connection.connect(); connection.connect();
try { try {
BufferedReader in = new BufferedReader(new InputStreamReader( BufferedReader in = new BufferedReader(new InputStreamReader(
connection.getInputStream())); connection.getInputStream()));
@ -108,7 +108,7 @@ public class TestGlobalFilter extends HttpServerFunctionalTest {
@Test @Test
public void testServletFilter() throws Exception { public void testServletFilter() throws Exception {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
//start a http server with CountingFilter //start a http server with CountingFilter
conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY, conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
RecordingFilter.Initializer.class.getName()); RecordingFilter.Initializer.class.getName());
@ -126,7 +126,7 @@ public class TestGlobalFilter extends HttpServerFunctionalTest {
final String outURL = "/static/a.out"; final String outURL = "/static/a.out";
final String logURL = "/logs/a.log"; final String logURL = "/logs/a.log";
final String[] urls = {fsckURL, stacksURL, ajspURL, listPathsURL, final String[] urls = {fsckURL, stacksURL, ajspURL, listPathsURL,
dataURL, streamFile, rootURL, allURL, outURL, logURL}; dataURL, streamFile, rootURL, allURL, outURL, logURL};
//access the urls //access the urls
@ -141,7 +141,7 @@ public class TestGlobalFilter extends HttpServerFunctionalTest {
} }
LOG.info("RECORDS = " + RECORDS); LOG.info("RECORDS = " + RECORDS);
//verify records //verify records
for(int i = 0; i < urls.length; i++) { for(int i = 0; i < urls.length; i++) {
assertTrue(RECORDS.remove(urls[i])); assertTrue(RECORDS.remove(urls[i]));

View File

@ -51,10 +51,10 @@ public class TestHtmlQuoting {
} }
private void runRoundTrip(String str) throws Exception { private void runRoundTrip(String str) throws Exception {
assertEquals(str, assertEquals(str,
HtmlQuoting.unquoteHtmlChars(HtmlQuoting.quoteHtmlChars(str))); HtmlQuoting.unquoteHtmlChars(HtmlQuoting.quoteHtmlChars(str)));
} }
@Test public void testRoundtrip() throws Exception { @Test public void testRoundtrip() throws Exception {
runRoundTrip(""); runRoundTrip("");
runRoundTrip("<>&'\""); runRoundTrip("<>&'\"");
@ -67,18 +67,18 @@ public class TestHtmlQuoting {
} }
runRoundTrip(buffer.toString()); runRoundTrip(buffer.toString());
} }
@Test @Test
public void testRequestQuoting() throws Exception { public void testRequestQuoting() throws Exception {
HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class); HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
HttpServer.QuotingInputFilter.RequestQuoter quoter = HttpServer.QuotingInputFilter.RequestQuoter quoter =
new HttpServer.QuotingInputFilter.RequestQuoter(mockReq); new HttpServer.QuotingInputFilter.RequestQuoter(mockReq);
Mockito.doReturn("a<b").when(mockReq).getParameter("x"); Mockito.doReturn("a<b").when(mockReq).getParameter("x");
assertEquals("Test simple param quoting", assertEquals("Test simple param quoting",
"a&lt;b", quoter.getParameter("x")); "a&lt;b", quoter.getParameter("x"));
Mockito.doReturn(null).when(mockReq).getParameter("x"); Mockito.doReturn(null).when(mockReq).getParameter("x");
assertEquals("Test that missing parameters dont cause NPE", assertEquals("Test that missing parameters dont cause NPE",
null, quoter.getParameter("x")); null, quoter.getParameter("x"));

View File

@ -76,11 +76,11 @@ public class TestHttpServer extends HttpServerFunctionalTest {
private static URL baseUrl; private static URL baseUrl;
// jetty 9.4.x needs this many threads to start, even in the small. // jetty 9.4.x needs this many threads to start, even in the small.
static final int MAX_THREADS = 16; static final int MAX_THREADS = 16;
@SuppressWarnings("serial") @SuppressWarnings("serial")
public static class EchoMapServlet extends HttpServlet { public static class EchoMapServlet extends HttpServlet {
@Override @Override
public void doGet(HttpServletRequest request, public void doGet(HttpServletRequest request,
HttpServletResponse response HttpServletResponse response
) throws ServletException, IOException { ) throws ServletException, IOException {
PrintWriter out = response.getWriter(); PrintWriter out = response.getWriter();
@ -100,13 +100,13 @@ public class TestHttpServer extends HttpServerFunctionalTest {
out.print('\n'); out.print('\n');
} }
out.close(); out.close();
} }
} }
@SuppressWarnings("serial") @SuppressWarnings("serial")
public static class EchoServlet extends HttpServlet { public static class EchoServlet extends HttpServlet {
@Override @Override
public void doGet(HttpServletRequest request, public void doGet(HttpServletRequest request,
HttpServletResponse response HttpServletResponse response
) throws ServletException, IOException { ) throws ServletException, IOException {
PrintWriter out = response.getWriter(); PrintWriter out = response.getWriter();
@ -122,7 +122,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
out.print('\n'); out.print('\n');
} }
out.close(); out.close();
} }
} }
@SuppressWarnings("serial") @SuppressWarnings("serial")
@ -139,7 +139,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
@SuppressWarnings("serial") @SuppressWarnings("serial")
public static class HtmlContentServlet extends HttpServlet { public static class HtmlContentServlet extends HttpServlet {
@Override @Override
public void doGet(HttpServletRequest request, public void doGet(HttpServletRequest request,
HttpServletResponse response HttpServletResponse response
) throws ServletException, IOException { ) throws ServletException, IOException {
response.setContentType("text/html"); response.setContentType("text/html");
@ -163,11 +163,11 @@ public class TestHttpServer extends HttpServerFunctionalTest {
baseUrl = getServerURL(server); baseUrl = getServerURL(server);
LOG.info("HTTP server started: "+ baseUrl); LOG.info("HTTP server started: "+ baseUrl);
} }
@AfterClass public static void cleanup() throws Exception { @AfterClass public static void cleanup() throws Exception {
server.stop(); server.stop();
} }
/** Test the maximum number of threads cannot be exceeded. */ /** Test the maximum number of threads cannot be exceeded. */
@Test public void testMaxThreads() throws Exception { @Test public void testMaxThreads() throws Exception {
int clientThreads = MAX_THREADS * 10; int clientThreads = MAX_THREADS * 10;
@ -199,24 +199,24 @@ public class TestHttpServer extends HttpServerFunctionalTest {
ready.await(); ready.await();
start.countDown(); start.countDown();
} }
@Test public void testEcho() throws Exception { @Test public void testEcho() throws Exception {
assertEquals("a:b\nc:d\n", assertEquals("a:b\nc:d\n",
readOutput(new URL(baseUrl, "/echo?a=b&c=d"))); readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
assertEquals("a:b\nc&lt;:d\ne:&gt;\n", assertEquals("a:b\nc&lt;:d\ne:&gt;\n",
readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>"))); readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));
} }
/** Test the echo map servlet that uses getParameterMap. */ /** Test the echo map servlet that uses getParameterMap. */
@Test public void testEchoMap() throws Exception { @Test public void testEchoMap() throws Exception {
assertEquals("a:b\nc:d\n", assertEquals("a:b\nc:d\n",
readOutput(new URL(baseUrl, "/echomap?a=b&c=d"))); readOutput(new URL(baseUrl, "/echomap?a=b&c=d")));
assertEquals("a:b,&gt;\nc&lt;:d\n", assertEquals("a:b,&gt;\nc&lt;:d\n",
readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>"))); readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
} }
/** /**
* Test that verifies headers can be up to 64K long. * Test that verifies headers can be up to 64K long.
* The test adds a 63K header leaving 1K for other headers. * The test adds a 63K header leaving 1K for other headers.
* This is because the header buffer setting is for ALL headers, * This is because the header buffer setting is for ALL headers,
* names and values included. */ * names and values included. */
@ -275,7 +275,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
* Dummy filter that mimics as an authentication filter. Obtains user identity * Dummy filter that mimics as an authentication filter. Obtains user identity
* from the request parameter user.name. Wraps around the request so that * from the request parameter user.name. Wraps around the request so that
* request.getRemoteUser() returns the user identity. * request.getRemoteUser() returns the user identity.
* *
*/ */
public static class DummyServletFilter implements Filter { public static class DummyServletFilter implements Filter {
@Override @Override
@ -317,7 +317,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
* Access a URL and get the corresponding return Http status code. The URL * Access a URL and get the corresponding return Http status code. The URL
* will be accessed as the passed user, by sending user.name request * will be accessed as the passed user, by sending user.name request
* parameter. * parameter.
* *
* @param urlstring * @param urlstring
* @param userName * @param userName
* @return * @return
@ -352,7 +352,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
* Verify the access for /logs, /stacks, /conf, /logLevel and /metrics * Verify the access for /logs, /stacks, /conf, /logLevel and /metrics
* servlets, when authentication filters are set, but authorization is not * servlets, when authentication filters are set, but authorization is not
* enabled. * enabled.
* @throws Exception * @throws Exception
*/ */
@Test @Test
@Ignore @Ignore
@ -388,7 +388,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
/** /**
* Verify the administrator access for /logs, /stacks, /conf, /logLevel and * Verify the administrator access for /logs, /stacks, /conf, /logLevel and
* /metrics servlets. * /metrics servlets.
* *
* @throws Exception * @throws Exception
*/ */
@Test @Test
@ -431,7 +431,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
} }
myServer.stop(); myServer.stop();
} }
@Test @Test
public void testRequestQuoterWithNull() throws Exception { public void testRequestQuoterWithNull() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class); HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
@ -534,7 +534,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
// hang onto this one for a bit more testing // hang onto this one for a bit more testing
HttpServer myServer = checkBindAddress("localhost", 0, false); HttpServer myServer = checkBindAddress("localhost", 0, false);
HttpServer myServer2 = null; HttpServer myServer2 = null;
try { try {
int port = myServer.getConnectorAddress(0).getPort(); int port = myServer.getConnectorAddress(0).getPort();
// it's already in use, true = expect a higher port // it's already in use, true = expect a higher port
myServer2 = checkBindAddress("localhost", port, true); myServer2 = checkBindAddress("localhost", port, true);
@ -551,7 +551,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
} }
} }
} }
private HttpServer checkBindAddress(String host, int port, boolean findPort) private HttpServer checkBindAddress(String host, int port, boolean findPort)
throws Exception { throws Exception {
HttpServer server = createServer(host, port); HttpServer server = createServer(host, port);

View File

@ -83,16 +83,16 @@ public class TestPathFilter extends HttpServerFunctionalTest {
} }
} }
} }
/** access a url, ignoring some IOException such as the page does not exist */ /** access a url, ignoring some IOException such as the page does not exist */
static void access(String urlstring) throws IOException { static void access(String urlstring) throws IOException {
LOG.warn("access " + urlstring); LOG.warn("access " + urlstring);
URL url = new URL(urlstring); URL url = new URL(urlstring);
URLConnection connection = url.openConnection(); URLConnection connection = url.openConnection();
connection.connect(); connection.connect();
try { try {
BufferedReader in = new BufferedReader(new InputStreamReader( BufferedReader in = new BufferedReader(new InputStreamReader(
connection.getInputStream())); connection.getInputStream()));
@ -109,7 +109,7 @@ public class TestPathFilter extends HttpServerFunctionalTest {
@Test @Test
public void testPathSpecFilters() throws Exception { public void testPathSpecFilters() throws Exception {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
//start a http server with CountingFilter //start a http server with CountingFilter
conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY, conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
RecordingFilter.Initializer.class.getName()); RecordingFilter.Initializer.class.getName());
@ -125,11 +125,11 @@ public class TestPathFilter extends HttpServerFunctionalTest {
final String rootURL = "/"; final String rootURL = "/";
final String allURL = "/*"; final String allURL = "/*";
final String[] filteredUrls = {baseURL, baseSlashURL, addedURL, final String[] filteredUrls = {baseURL, baseSlashURL, addedURL,
addedSlashURL, longURL}; addedSlashURL, longURL};
final String[] notFilteredUrls = {rootURL, allURL}; final String[] notFilteredUrls = {rootURL, allURL};
// access the urls and verify our paths specs got added to the // access the urls and verify our paths specs got added to the
// filters // filters
final String prefix = "http://" final String prefix = "http://"
+ NetUtils.getHostPortString(http.getConnectorAddress(0)); + NetUtils.getHostPortString(http.getConnectorAddress(0));
@ -145,7 +145,7 @@ public class TestPathFilter extends HttpServerFunctionalTest {
} }
LOG.info("RECORDS = " + RECORDS); LOG.info("RECORDS = " + RECORDS);
//verify records //verify records
for(int i = 0; i < filteredUrls.length; i++) { for(int i = 0; i < filteredUrls.length; i++) {
assertTrue(RECORDS.remove(filteredUrls[i])); assertTrue(RECORDS.remove(filteredUrls[i]));

View File

@ -35,10 +35,11 @@ import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.GenericTestUtils;
import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.StringUtils;
import org.junit.Assert;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -46,7 +47,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class}) @Category({MiscTests.class, SmallTests.class})
public class TestServletFilter extends HttpServerFunctionalTest { public class TestServletFilter extends HttpServerFunctionalTest {
private static final Log LOG = LogFactory.getLog(HttpServer.class); private static final Log LOG = LogFactory.getLog(HttpServer.class);
static volatile String uri = null; static volatile String uri = null;
/** A very simple filter which record the uri filtered. */ /** A very simple filter which record the uri filtered. */
static public class SimpleFilter implements Filter { static public class SimpleFilter implements Filter {
@ -83,15 +84,21 @@ public class TestServletFilter extends HttpServerFunctionalTest {
} }
} }
} }
public static void assertExceptionContains(String string, Throwable t) {
String msg = t.getMessage();
Assert.assertTrue(
"Expected to find '" + string + "' but got unexpected exception:"
+ StringUtils.stringifyException(t), msg.contains(string));
}
/** access a url, ignoring some IOException such as the page does not exist */ /** access a url, ignoring some IOException such as the page does not exist */
static void access(String urlstring) throws IOException { static void access(String urlstring) throws IOException {
LOG.warn("access " + urlstring); LOG.warn("access " + urlstring);
URL url = new URL(urlstring); URL url = new URL(urlstring);
URLConnection connection = url.openConnection(); URLConnection connection = url.openConnection();
connection.connect(); connection.connect();
try { try {
BufferedReader in = new BufferedReader(new InputStreamReader( BufferedReader in = new BufferedReader(new InputStreamReader(
connection.getInputStream())); connection.getInputStream()));
@ -112,7 +119,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
// It's second class. Could comment it out if only failing test (as per @nkeywal sort of) // It's second class. Could comment it out if only failing test (as per @nkeywal sort of)
public void testServletFilter() throws Exception { public void testServletFilter() throws Exception {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
//start a http server with CountingFilter //start a http server with CountingFilter
conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY, conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
SimpleFilter.Initializer.class.getName()); SimpleFilter.Initializer.class.getName());
@ -124,12 +131,12 @@ public class TestServletFilter extends HttpServerFunctionalTest {
final String ajspURL = "/a.jsp"; final String ajspURL = "/a.jsp";
final String logURL = "/logs/a.log"; final String logURL = "/logs/a.log";
final String hadooplogoURL = "/static/hadoop-logo.jpg"; final String hadooplogoURL = "/static/hadoop-logo.jpg";
final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL}; final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
final Random ran = new Random(); final Random ran = new Random();
final int[] sequence = new int[50]; final int[] sequence = new int[50];
//generate a random sequence and update counts //generate a random sequence and update counts
for(int i = 0; i < sequence.length; i++) { for(int i = 0; i < sequence.length; i++) {
sequence[i] = ran.nextInt(urls.length); sequence[i] = ran.nextInt(urls.length);
} }
@ -153,7 +160,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
http.stop(); http.stop();
} }
} }
static public class ErrorFilter extends SimpleFilter { static public class ErrorFilter extends SimpleFilter {
@Override @Override
public void init(FilterConfig arg0) throws ServletException { public void init(FilterConfig arg0) throws ServletException {
@ -183,10 +190,10 @@ public class TestServletFilter extends HttpServerFunctionalTest {
http.start(); http.start();
fail("expecting exception"); fail("expecting exception");
} catch (IOException e) { } catch (IOException e) {
GenericTestUtils.assertExceptionContains("Problem starting http server", e); assertExceptionContains("Problem starting http server", e);
} }
} }
/** /**
* Similar to the above test case, except that it uses a different API to add the * Similar to the above test case, except that it uses a different API to add the
* filter. Regression test for HADOOP-8786. * filter. Regression test for HADOOP-8786.
@ -203,7 +210,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
http.start(); http.start();
fail("expecting exception"); fail("expecting exception");
} catch (IOException e) { } catch (IOException e) {
GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e); assertExceptionContains("Unable to initialize WebAppContext", e);
} }
} }

View File

@ -82,7 +82,7 @@ public class TestConfServlet extends TestCase {
ConfServlet.writeResponse(getTestConf(), sw, "xml"); ConfServlet.writeResponse(getTestConf(), sw, "xml");
String xml = sw.toString(); String xml = sw.toString();
DocumentBuilderFactory docBuilderFactory DocumentBuilderFactory docBuilderFactory
= DocumentBuilderFactory.newInstance(); = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = docBuilderFactory.newDocumentBuilder(); DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
Document doc = builder.parse(new InputSource(new StringReader(xml))); Document doc = builder.parse(new InputSource(new StringReader(xml)));

View File

@ -5,9 +5,9 @@
* The ASF licenses this file to You under the Apache License, Version 2.0 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with * (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at * the License. You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -50,41 +50,41 @@ public class TestJMXJsonServlet extends HttpServerFunctionalTest {
server.start(); server.start();
baseUrl = getServerURL(server); baseUrl = getServerURL(server);
} }
@AfterClass public static void cleanup() throws Exception { @AfterClass public static void cleanup() throws Exception {
server.stop(); server.stop();
} }
public static void assertReFind(String re, String value) { public static void assertReFind(String re, String value) {
Pattern p = Pattern.compile(re); Pattern p = Pattern.compile(re);
Matcher m = p.matcher(value); Matcher m = p.matcher(value);
assertTrue("'"+p+"' does not match "+value, m.find()); assertTrue("'"+p+"' does not match "+value, m.find());
} }
@Test public void testQuery() throws Exception { @Test public void testQuery() throws Exception {
String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime")); String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime"));
LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result); LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result); assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
assertReFind("\"modelerType\"", result); assertReFind("\"modelerType\"", result);
result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory")); result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result); LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result); assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
assertReFind("\"modelerType\"", result); assertReFind("\"modelerType\"", result);
result = readOutput(new URL(baseUrl, "/jmx")); result = readOutput(new URL(baseUrl, "/jmx"));
LOG.info("/jmx RESULT: "+result); LOG.info("/jmx RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result); assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
// test to get an attribute of a mbean // test to get an attribute of a mbean
result = readOutput(new URL(baseUrl, result = readOutput(new URL(baseUrl,
"/jmx?get=java.lang:type=Memory::HeapMemoryUsage")); "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
LOG.info("/jmx RESULT: "+result); LOG.info("/jmx RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result); assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
assertReFind("\"committed\"\\s*:", result); assertReFind("\"committed\"\\s*:", result);
// negative test to get an attribute of a mbean // negative test to get an attribute of a mbean
result = readOutput(new URL(baseUrl, result = readOutput(new URL(baseUrl,
"/jmx?get=java.lang:type=Memory::")); "/jmx?get=java.lang:type=Memory::"));
LOG.info("/jmx RESULT: "+result); LOG.info("/jmx RESULT: "+result);
assertReFind("\"ERROR\"", result); assertReFind("\"ERROR\"", result);

View File

@ -45,30 +45,30 @@ public class TestStaticUserWebFilter {
ServerConfigurationKeys.HBASE_HTTP_STATIC_USER); ServerConfigurationKeys.HBASE_HTTP_STATIC_USER);
return mock; return mock;
} }
@Test @Test
public void testFilter() throws Exception { public void testFilter() throws Exception {
FilterConfig config = mockConfig("myuser"); FilterConfig config = mockConfig("myuser");
StaticUserFilter suf = new StaticUserFilter(); StaticUserFilter suf = new StaticUserFilter();
suf.init(config); suf.init(config);
ArgumentCaptor<HttpServletRequestWrapper> wrapperArg = ArgumentCaptor<HttpServletRequestWrapper> wrapperArg =
ArgumentCaptor.forClass(HttpServletRequestWrapper.class); ArgumentCaptor.forClass(HttpServletRequestWrapper.class);
FilterChain chain = mock(FilterChain.class); FilterChain chain = mock(FilterChain.class);
suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class), suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class),
chain); chain);
Mockito.verify(chain).doFilter(wrapperArg.capture(), Mockito.<ServletResponse>anyObject()); Mockito.verify(chain).doFilter(wrapperArg.capture(), Mockito.<ServletResponse>anyObject());
HttpServletRequestWrapper wrapper = wrapperArg.getValue(); HttpServletRequestWrapper wrapper = wrapperArg.getValue();
assertEquals("myuser", wrapper.getUserPrincipal().getName()); assertEquals("myuser", wrapper.getUserPrincipal().getName());
assertEquals("myuser", wrapper.getRemoteUser()); assertEquals("myuser", wrapper.getRemoteUser());
suf.destroy(); suf.destroy();
} }
@Test @Test
public void testOldStyleConfiguration() { public void testOldStyleConfiguration() {
Configuration conf = new Configuration(); Configuration conf = new Configuration();

View File

@ -69,8 +69,8 @@ public class KeyStoreTestUtil {
* @param algorithm the signing algorithm, eg "SHA1withRSA" * @param algorithm the signing algorithm, eg "SHA1withRSA"
* @return the self-signed certificate * @return the self-signed certificate
*/ */
public static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm) public static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
throws CertificateEncodingException, InvalidKeyException, IllegalStateException, throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
NoSuchProviderException, NoSuchAlgorithmException, SignatureException { NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
Date from = new Date(); Date from = new Date();
Date to = new Date(from.getTime() + days * 86400000l); Date to = new Date(from.getTime() + days * 86400000l);
@ -127,7 +127,7 @@ public class KeyStoreTestUtil {
/** /**
* Creates a keystore with a single key and saves it to a file. * Creates a keystore with a single key and saves it to a file.
* *
* @param filename String file to save * @param filename String file to save
* @param password String store password to set on keystore * @param password String store password to set on keystore
* @param keyPassword String key password to set on key * @param keyPassword String key password to set on key
@ -185,7 +185,7 @@ public class KeyStoreTestUtil {
* SSLFactory. This includes keys, certs, keystores, truststores, the server * SSLFactory. This includes keys, certs, keystores, truststores, the server
* SSL configuration file, the client SSL configuration file, and the master * SSL configuration file, the client SSL configuration file, and the master
* configuration file read by the SSLFactory. * configuration file read by the SSLFactory.
* *
* @param keystoresDir String directory to save keystores * @param keystoresDir String directory to save keystores
* @param sslConfDir String directory to save SSL configuration files * @param sslConfDir String directory to save SSL configuration files
* @param conf Configuration master configuration to be used by an SSLFactory, * @param conf Configuration master configuration to be used by an SSLFactory,
@ -244,7 +244,7 @@ public class KeyStoreTestUtil {
/** /**
* Creates SSL configuration for a client. * Creates SSL configuration for a client.
* *
* @param clientKS String client keystore file * @param clientKS String client keystore file
* @param password String store password, or null to avoid setting store * @param password String store password, or null to avoid setting store
* password * password
@ -262,7 +262,7 @@ public class KeyStoreTestUtil {
/** /**
* Creates SSL configuration for a server. * Creates SSL configuration for a server.
* *
* @param serverKS String server keystore file * @param serverKS String server keystore file
* @param password String store password, or null to avoid setting store * @param password String store password, or null to avoid setting store
* password * password
@ -280,7 +280,7 @@ public class KeyStoreTestUtil {
/** /**
* Creates SSL configuration. * Creates SSL configuration.
* *
* @param mode SSLFactory.Mode mode to configure * @param mode SSLFactory.Mode mode to configure
* @param keystore String keystore file * @param keystore String keystore file
* @param password String store password, or null to avoid setting store * @param password String store password, or null to avoid setting store
@ -325,7 +325,7 @@ public class KeyStoreTestUtil {
/** /**
* Saves configuration to a file. * Saves configuration to a file.
* *
* @param file File to save * @param file File to save
* @param conf Configuration contents to write to file * @param conf Configuration contents to write to file
* @throws IOException if there is an I/O error saving the file * @throws IOException if there is an I/O error saving the file

View File

@ -0,0 +1,68 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Debugging Pattern format
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
# Custom Logging levels
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop=WARN
log4j.logger.org.apache.zookeeper=ERROR
log4j.logger.org.apache.hadoop.hbase=DEBUG
#These settings are workarounds against spurious logs from the minicluster.
#See HBASE-4709
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE

View File

@ -0,0 +1,21 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Test CSS file for content type handling - empty, since we just check
* returned content type!
*/

View File

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?><%!
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
%>
<%@ page contentType="text/html; charset=UTF-8" %>
Hello world!

View File

@ -20,10 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.lang.management.ManagementFactory; import java.lang.management.ManagementFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.EnumSet; import java.util.EnumSet;
import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ArrayBlockingQueue;
@ -46,7 +44,7 @@ import org.apache.hadoop.hbase.rest.filter.GzipFilter;
import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter; import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;
import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.DNS; import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.HttpServerUtil; import org.apache.hadoop.hbase.http.HttpServerUtil;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.hbase.util.VersionInfo;

View File

@ -24,7 +24,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.HttpServerUtil; import org.apache.hadoop.hbase.http.HttpServerUtil;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConfiguration;
@ -99,7 +99,7 @@ public class HBaseRESTTestingUtility {
// get the port // get the port
testServletPort = ((ServerConnector)server.getConnectors()[0]).getLocalPort(); testServletPort = ((ServerConnector)server.getConnectors()[0]).getLocalPort();
LOG.info("started " + server.getClass().getName() + " on port " + LOG.info("started " + server.getClass().getName() + " on port " +
testServletPort); testServletPort);
} }

View File

@ -359,6 +359,16 @@
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId> <artifactId>hbase-common</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-http</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-http</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<!--Needed by the visibility tags and acl CPEP things <!--Needed by the visibility tags and acl CPEP things
in here in hbase-server (that should be out in hbase-endpoints in here in hbase-server (that should be out in hbase-endpoints
@ -458,50 +468,15 @@
<groupId>org.eclipse.jetty</groupId> <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId> <artifactId>jetty-servlet</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util-ajax</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jsp</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.eclipse.jetty</groupId> <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId> <artifactId>jetty-webapp</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
</dependency>
<dependency> <dependency>
<!--For JspC used in ant task--> <!--For JspC used in ant task-->
<groupId>org.glassfish.web</groupId> <groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId> <artifactId>javax.servlet.jsp</artifactId>
</dependency> </dependency>
<!-- Specifically needed for jetty-jsp, included
to bypass version scanning that hits a bad repo
see HBASE-18831 -->
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.codehaus.jettison</groupId> <groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId> <artifactId>jettison</artifactId>

View File

@ -0,0 +1,60 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.util.JSONBean;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
/**
* Utility for doing JSON and MBeans.
*/
/**
 * Utility for dumping a curated subset of RegionServer MBeans as JSON.
 * Used on abort to capture a metrics snapshot for post-mortem analysis.
 */
public final class DumpRegionServerMetrics {

  /** Utility class with only static members; not meant to be instantiated. */
  private DumpRegionServerMetrics() {
  }

  /**
   * Dump out a subset of regionserver mbeans only, not all of them, as json on System.out.
   *
   * @return JSON text rendering of the JVM Memory bean plus the RegionServer
   *         IPC, Replication and Server beans
   * @throws MalformedObjectNameException if one of the hard-coded ObjectName strings is invalid
   * @throws IOException if writing a bean to the underlying writer fails
   */
  public static String dumpMetrics() throws MalformedObjectNameException, IOException {
    // Pre-size the buffer; a full dump is typically tens of KB.
    StringWriter sw = new StringWriter(1024 * 100);
    try (PrintWriter writer = new PrintWriter(sw)) {
      JSONBean dumper = new JSONBean();
      try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) {
        MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
        jsonBeanWriter.write(mbeanServer,
            new ObjectName("java.lang:type=Memory"), null, false);
        jsonBeanWriter.write(mbeanServer,
            new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=IPC"), null, false);
        jsonBeanWriter.write(mbeanServer,
            new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Replication"), null, false);
        jsonBeanWriter.write(mbeanServer,
            new ObjectName("Hadoop:service=HBase,name=RegionServer,sub=Server"), null, false);
      }
    }
    // Note: no StringWriter.close() -- it is a documented no-op, and the
    // previous explicit call only obscured that the buffer is still read below.
    return sw.toString();
  }

  /**
   * Command-line entry point: prints the metrics dump to stdout.
   */
  public static void main(String[] args) throws IOException, MalformedObjectNameException {
    System.out.println(dumpMetrics());
  }
}

View File

@ -147,7 +147,6 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HasThread; import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.JSONBean;
import org.apache.hadoop.hbase.util.JvmPauseMonitor; import org.apache.hadoop.hbase.util.JvmPauseMonitor;
import org.apache.hadoop.hbase.util.NettyEventLoopGroupConfig; import org.apache.hadoop.hbase.util.NettyEventLoopGroupConfig;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
@ -2412,7 +2411,7 @@ public class HRegionServer extends HasThread implements
CoprocessorHost.getLoadedCoprocessors()); CoprocessorHost.getLoadedCoprocessors());
// Try and dump metrics if abort -- might give clue as to how fatal came about.... // Try and dump metrics if abort -- might give clue as to how fatal came about....
try { try {
LOG.info("Dump of metrics as JSON on abort: " + JSONBean.dumpRegionServerMetrics()); LOG.info("Dump of metrics as JSON on abort: " + DumpRegionServerMetrics.dumpMetrics());
} catch (MalformedObjectNameException | IOException e) { } catch (MalformedObjectNameException | IOException e) {
LOG.warn("Failed dumping metrics", e); LOG.warn("Failed dumping metrics", e);
} }

View File

@ -35,7 +35,6 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.impl.Log4JLogger; import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Time;
import org.apache.log4j.Layout; import org.apache.log4j.Layout;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@ -70,14 +69,14 @@ public abstract class GenericTestUtils {
public static int uniqueSequenceId() { public static int uniqueSequenceId() {
return sequence.incrementAndGet(); return sequence.incrementAndGet();
} }
/** /**
* Assert that a given file exists. * Assert that a given file exists.
*/ */
public static void assertExists(File f) { public static void assertExists(File f) {
Assert.assertTrue("File " + f + " should exist", f.exists()); Assert.assertTrue("File " + f + " should exist", f.exists());
} }
/** /**
* List all of the files in 'dir' that match the regex 'pattern'. * List all of the files in 'dir' that match the regex 'pattern'.
* Then check that this list is identical to 'expectedMatches'. * Then check that this list is identical to 'expectedMatches'.
@ -85,7 +84,7 @@ public abstract class GenericTestUtils {
*/ */
public static void assertGlobEquals(File dir, String pattern, public static void assertGlobEquals(File dir, String pattern,
String ... expectedMatches) throws IOException { String ... expectedMatches) throws IOException {
Set<String> found = Sets.newTreeSet(); Set<String> found = Sets.newTreeSet();
for (File f : FileUtil.listFiles(dir)) { for (File f : FileUtil.listFiles(dir)) {
if (f.getName().matches(pattern)) { if (f.getName().matches(pattern)) {
@ -98,13 +97,6 @@ public abstract class GenericTestUtils {
Joiner.on(",").join(expectedSet), Joiner.on(",").join(expectedSet),
Joiner.on(",").join(found)); Joiner.on(",").join(found));
} }
public static void assertExceptionContains(String string, Throwable t) {
String msg = t.getMessage();
Assert.assertTrue(
"Expected to find '" + string + "' but got unexpected exception:"
+ StringUtils.stringifyException(t), msg.contains(string));
}
public static void waitFor(Supplier<Boolean> check, public static void waitFor(Supplier<Boolean> check,
int checkEveryMillis, int waitForMillis) int checkEveryMillis, int waitForMillis)
@ -116,26 +108,26 @@ public abstract class GenericTestUtils {
if (result) { if (result) {
return; return;
} }
Thread.sleep(checkEveryMillis); Thread.sleep(checkEveryMillis);
} while (Time.now() - st < waitForMillis); } while (Time.now() - st < waitForMillis);
throw new TimeoutException("Timed out waiting for condition. " + throw new TimeoutException("Timed out waiting for condition. " +
"Thread diagnostics:\n" + "Thread diagnostics:\n" +
TimedOutTestsListener.buildThreadDiagnosticString()); TimedOutTestsListener.buildThreadDiagnosticString());
} }
public static class LogCapturer { public static class LogCapturer {
private StringWriter sw = new StringWriter(); private StringWriter sw = new StringWriter();
private WriterAppender appender; private WriterAppender appender;
private Logger logger; private Logger logger;
public static LogCapturer captureLogs(Log l) { public static LogCapturer captureLogs(Log l) {
Logger logger = ((Log4JLogger)l).getLogger(); Logger logger = ((Log4JLogger)l).getLogger();
LogCapturer c = new LogCapturer(logger); LogCapturer c = new LogCapturer(logger);
return c; return c;
} }
private LogCapturer(Logger logger) { private LogCapturer(Logger logger) {
this.logger = logger; this.logger = logger;
@ -143,36 +135,36 @@ public abstract class GenericTestUtils {
WriterAppender wa = new WriterAppender(layout, sw); WriterAppender wa = new WriterAppender(layout, sw);
logger.addAppender(wa); logger.addAppender(wa);
} }
public String getOutput() { public String getOutput() {
return sw.toString(); return sw.toString();
} }
public void stopCapturing() { public void stopCapturing() {
logger.removeAppender(appender); logger.removeAppender(appender);
} }
} }
/** /**
* Mockito answer helper that triggers one latch as soon as the * Mockito answer helper that triggers one latch as soon as the
* method is called, then waits on another before continuing. * method is called, then waits on another before continuing.
*/ */
public static class DelayAnswer implements Answer<Object> { public static class DelayAnswer implements Answer<Object> {
private final Log LOG; private final Log LOG;
private final CountDownLatch fireLatch = new CountDownLatch(1); private final CountDownLatch fireLatch = new CountDownLatch(1);
private final CountDownLatch waitLatch = new CountDownLatch(1); private final CountDownLatch waitLatch = new CountDownLatch(1);
private final CountDownLatch resultLatch = new CountDownLatch(1); private final CountDownLatch resultLatch = new CountDownLatch(1);
private final AtomicInteger fireCounter = new AtomicInteger(0); private final AtomicInteger fireCounter = new AtomicInteger(0);
private final AtomicInteger resultCounter = new AtomicInteger(0); private final AtomicInteger resultCounter = new AtomicInteger(0);
// Result fields set after proceed() is called. // Result fields set after proceed() is called.
private volatile Throwable thrown; private volatile Throwable thrown;
private volatile Object returnValue; private volatile Object returnValue;
public DelayAnswer(Log log) { public DelayAnswer(Log log) {
this.LOG = log; this.LOG = log;
} }
@ -183,7 +175,7 @@ public abstract class GenericTestUtils {
public void waitForCall() throws InterruptedException { public void waitForCall() throws InterruptedException {
fireLatch.await(); fireLatch.await();
} }
/** /**
* Tell the method to proceed. * Tell the method to proceed.
* This should only be called after waitForCall() * This should only be called after waitForCall()
@ -191,7 +183,7 @@ public abstract class GenericTestUtils {
public void proceed() { public void proceed() {
waitLatch.countDown(); waitLatch.countDown();
} }
@Override @Override
public Object answer(InvocationOnMock invocation) throws Throwable { public Object answer(InvocationOnMock invocation) throws Throwable {
LOG.info("DelayAnswer firing fireLatch"); LOG.info("DelayAnswer firing fireLatch");
@ -220,7 +212,7 @@ public abstract class GenericTestUtils {
resultLatch.countDown(); resultLatch.countDown();
} }
} }
/** /**
* After calling proceed(), this will wait until the call has * After calling proceed(), this will wait until the call has
* completed and a result has been returned to the caller. * completed and a result has been returned to the caller.
@ -228,7 +220,7 @@ public abstract class GenericTestUtils {
public void waitForResult() throws InterruptedException { public void waitForResult() throws InterruptedException {
resultLatch.await(); resultLatch.await();
} }
/** /**
* After the call has gone through, return any exception that * After the call has gone through, return any exception that
* was thrown, or null if no exception was thrown. * was thrown, or null if no exception was thrown.
@ -236,7 +228,7 @@ public abstract class GenericTestUtils {
public Throwable getThrown() { public Throwable getThrown() {
return thrown; return thrown;
} }
/** /**
* After the call has gone through, return the call's return value, * After the call has gone through, return the call's return value,
* or null in case it was void or an exception was thrown. * or null in case it was void or an exception was thrown.
@ -244,20 +236,20 @@ public abstract class GenericTestUtils {
public Object getReturnValue() { public Object getReturnValue() {
return returnValue; return returnValue;
} }
public int getFireCount() { public int getFireCount() {
return fireCounter.get(); return fireCounter.get();
} }
public int getResultCount() { public int getResultCount() {
return resultCounter.get(); return resultCounter.get();
} }
} }
/** /**
* An Answer implementation that simply forwards all calls through * An Answer implementation that simply forwards all calls through
* to a delegate. * to a delegate.
* *
* This is useful as the default Answer for a mock object, to create * This is useful as the default Answer for a mock object, to create
* something like a spy on an RPC proxy. For example: * something like a spy on an RPC proxy. For example:
* <code> * <code>
@ -268,14 +260,14 @@ public abstract class GenericTestUtils {
* ... * ...
* </code> * </code>
*/ */
public static class DelegateAnswer implements Answer<Object> { public static class DelegateAnswer implements Answer<Object> {
private final Object delegate; private final Object delegate;
private final Log log; private final Log log;
public DelegateAnswer(Object delegate) { public DelegateAnswer(Object delegate) {
this(null, delegate); this(null, delegate);
} }
public DelegateAnswer(Log log, Object delegate) { public DelegateAnswer(Log log, Object delegate) {
this.log = log; this.log = log;
this.delegate = delegate; this.delegate = delegate;
@ -305,11 +297,11 @@ public abstract class GenericTestUtils {
public static class SleepAnswer implements Answer<Object> { public static class SleepAnswer implements Answer<Object> {
private final int maxSleepTime; private final int maxSleepTime;
private static Random r = new Random(); private static Random r = new Random();
public SleepAnswer(int maxSleepTime) { public SleepAnswer(int maxSleepTime) {
this.maxSleepTime = maxSleepTime; this.maxSleepTime = maxSleepTime;
} }
@Override @Override
public Object answer(InvocationOnMock invocation) throws Throwable { public Object answer(InvocationOnMock invocation) throws Throwable {
boolean interrupted = false; boolean interrupted = false;
@ -333,11 +325,11 @@ public abstract class GenericTestUtils {
" but got:\n" + output, " but got:\n" + output,
Pattern.compile(pattern).matcher(output).find()); Pattern.compile(pattern).matcher(output).find());
} }
public static void assertValueNear(long expected, long actual, long allowedError) { public static void assertValueNear(long expected, long actual, long allowedError) {
assertValueWithinRange(expected - allowedError, expected + allowedError, actual); assertValueWithinRange(expected - allowedError, expected + allowedError, actual);
} }
public static void assertValueWithinRange(long expectedMin, long expectedMax, public static void assertValueWithinRange(long expectedMin, long expectedMax,
long actual) { long actual) {
Assert.assertTrue("Expected " + actual + " to be in range (" + expectedMin + "," Assert.assertTrue("Expected " + actual + " to be in range (" + expectedMin + ","
@ -352,7 +344,7 @@ public abstract class GenericTestUtils {
public static void assertNoThreadsMatching(String regex) { public static void assertNoThreadsMatching(String regex) {
Pattern pattern = Pattern.compile(regex); Pattern pattern = Pattern.compile(regex);
ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20); ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20);
for (ThreadInfo info : infos) { for (ThreadInfo info : infos) {
if (info == null) continue; if (info == null) continue;

View File

@ -141,6 +141,10 @@
<groupId>org.glassfish.web</groupId> <groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId> <artifactId>javax.servlet.jsp</artifactId>
</exclusion> </exclusion>
<exclusion>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>org.glassfish.jersey.containers</groupId> <groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId> <artifactId>jersey-container-servlet-core</artifactId>

23
pom.xml
View File

@ -65,6 +65,7 @@
<module>hbase-replication</module> <module>hbase-replication</module>
<module>hbase-mapreduce</module> <module>hbase-mapreduce</module>
<module>hbase-resource-bundle</module> <module>hbase-resource-bundle</module>
<module>hbase-http</module>
<module>hbase-server</module> <module>hbase-server</module>
<module>hbase-thrift</module> <module>hbase-thrift</module>
<module>hbase-shell</module> <module>hbase-shell</module>
@ -1580,6 +1581,18 @@
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<version>${project.version}</version> <version>${project.version}</version>
</dependency> </dependency>
<dependency>
<artifactId>hbase-http</artifactId>
<groupId>org.apache.hbase</groupId>
<version>${project.version}</version>
</dependency>
<dependency>
<artifactId>hbase-http</artifactId>
<groupId>org.apache.hbase</groupId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<artifactId>hbase-server</artifactId> <artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
@ -1923,6 +1936,11 @@
<artifactId>jersey-client</artifactId> <artifactId>jersey-client</artifactId>
<version>${jersey.version}</version> <version>${jersey.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency> <dependency>
<!--This lib has JspC in it. Needed precompiling jsps in hbase-rest, etc.--> <!--This lib has JspC in it. Needed precompiling jsps in hbase-rest, etc.-->
<groupId>org.glassfish.web</groupId> <groupId>org.glassfish.web</groupId>
@ -1984,6 +2002,11 @@
<version>${bouncycastle.version}</version> <version>${bouncycastle.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-core</artifactId>
<version>${kerby.version}</version>
</dependency>
<dependency> <dependency>
<groupId>org.apache.kerby</groupId> <groupId>org.apache.kerby</groupId>
<artifactId>kerb-client</artifactId> <artifactId>kerb-client</artifactId>