HBASE-8224 Publish hbase build against h1 and h2 adding '-hadoop1' or '-hadoop2' to version string

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1509813 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-08-02 19:01:34 +00:00
parent 55501f8eb3
commit 34c8e0ab16
13 changed files with 418 additions and 150 deletions
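
Note on usage (not part of the diff): throughout these poms the Hadoop 1 line is the default profile, activated by the absence of the hadoop.profile property (the `<!--h1--><name>!hadoop.profile</name>` lines below), while the Hadoop 2 profiles key on hadoop.profile=2.0, as the in-pom comments state ("Activate using: mvn -Dhadoop.profile=2.0"). A minimal sketch of the two invocations; the goals are illustrative only:

    # Hadoop 1 build (default: hadoop.profile is unset)
    mvn clean install

    # Hadoop 2 build (activate with the property the hadoop-2.0 profiles key on)
    mvn clean install -Dhadoop.profile=2.0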

View File

@@ -154,10 +154,11 @@
 activation property as the parent Hadoop 1.0.x profile to make sure it gets run at
 the same time. -->
     <profile>
-      <id>hadoop-1.0</id>
+      <id>hadoop-1.1</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
@@ -176,14 +177,22 @@
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
+          <artifactId>hadoop-common</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
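
Note (not part of the diff): the single-line `<!--h1-->`/`<!--h2-->` formatting above exists so that dev-support/generate-hadoopX-poms.sh, which presumably generates the per-Hadoop poms behind the '-hadoop1'/'-hadoop2' version strings named in the commit summary, can rewrite each activation with one line-oriented substitution. The script itself is not shown in this diff; a hypothetical sketch of that kind of rewrite (assumed sed commands and output name, not the actual script):

    # Hypothetical illustration only; the real generate-hadoopX-poms.sh is not in this diff.
    # Make the hadoop-2.0 profile the default and turn the h1 line into an opt-in property:
    sed -e 's|<!--h2--><name>hadoop.profile</name><value>2.0</value>|<!--h2--><name>!hadoop.profile</name>|' \
        -e 's|<!--h1--><name>!hadoop.profile</name>|<!--h1--><name>hadoop.profile</name><value>1.0</value>|' \
        pom.xml > pom.xml.hadoop2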

View File

@@ -187,14 +187,27 @@
       <groupId>commons-logging</groupId>
       <artifactId>commons-logging</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+      <version>3.2.1</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <scope>compile</scope>
     </dependency>
   </dependencies>
   <profiles>
@@ -218,7 +231,8 @@
       <id>hadoop-1.1</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
@@ -260,18 +274,18 @@
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
+          <artifactId>hadoop-annotations</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-annotations</artifactId>
+          <artifactId>hadoop-common</artifactId>
         </dependency>
       </dependencies>
       <build>

View File

@@ -27,10 +27,10 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import java.lang.reflect.Method;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -44,10 +44,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class JVM
-{
-  static final Logger LOG = LoggerFactory.getLogger(JVM.class);
+public class JVM {
+  private static final Log LOG = LogFactory.getLog(JVM.class);
   private OperatingSystemMXBean osMbean;
   private static final boolean ibmvendor =

View File

@@ -63,12 +63,34 @@
     </plugins>
   </build>
   <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
@@ -84,12 +106,8 @@
       <artifactId>libthrift</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
     </dependency>
   </dependencies>
   <profiles>
@@ -117,7 +135,8 @@ if we can combine these profiles somehow -->
       <id>hadoop-1.1</id>
      <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
@@ -139,22 +158,18 @@ if we can combine these profiles somehow -->
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-annotations</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
+          <artifactId>hadoop-common</artifactId>
         </dependency>
       </dependencies>
       <build>

View File

@@ -117,10 +117,6 @@ limitations under the License.
       <groupId>com.yammer.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-test</artifactId>
@@ -128,6 +124,10 @@ limitations under the License.
       <optional>true</optional>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
   </dependencies>
   <profiles>

View File

@@ -140,7 +140,7 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
       <version>${hadoop-two.version}</version>
     </dependency>
     <dependency>
@@ -157,10 +157,6 @@ limitations under the License.
       <groupId>com.yammer.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-    </dependency>
     <!-- This was marked as test dep in earlier pom, but was scoped compile. Where
          do we actually need it? -->
     <dependency>
@@ -168,6 +164,18 @@ limitations under the License.
       <artifactId>hadoop-minicluster</artifactId>
       <version>${hadoop-two.version}</version>
     </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
   </dependencies>
   <profiles>

View File

@@ -137,11 +137,44 @@
   <dependencies>
     <!-- Intra-project dependencies -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>jar</type>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-math</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
@@ -175,6 +208,10 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.cloudera.htrace</groupId>
+      <artifactId>htrace</artifactId>
+    </dependency>
     <!-- General dependencies -->
   </dependencies>
@@ -200,7 +237,8 @@
       <id>hadoop-1.1</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
@@ -242,14 +280,19 @@
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <type>test-jar</type>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -257,7 +300,7 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
+          <artifactId>hadoop-common</artifactId>
         </dependency>
       </dependencies>
       <build>

View File

@@ -78,6 +78,105 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
   </dependencies>
+  <profiles>
+    <!-- Profiles for building against different hadoop versions -->
+    <profile>
+      <id>hadoop-1.1</id>
+      <activation>
+        <property>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-1.0</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+    <!--
+      profile for building against Hadoop 2.0.0-alpha. Activate using:
+       mvn -Dhadoop.profile=2.0
+    -->
+    <profile>
+      <id>hadoop-2.0</id>
+      <activation>
+        <property>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+    <!--
+      profile for building against Hadoop 3.0.x. Activate using:
+       mvn -Dhadoop.profile=3.0
+    -->
+    <profile>
+      <id>hadoop-3.0</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>3.0</value>
+        </property>
+      </activation>
+      <properties>
+        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>

View File

@@ -299,6 +299,14 @@
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
     </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
@@ -371,6 +379,10 @@
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
@@ -419,14 +431,6 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-xc</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-    </dependency>
     <dependency>
       <groupId>tomcat</groupId>
       <artifactId>jasper-compiler</artifactId>
@@ -548,7 +552,8 @@
       <id>hadoop-1.1</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
@@ -589,11 +594,37 @@
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
         </property>
       </activation>
       <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <type>test-jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <type>test-jar</type>
+        </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-client</artifactId>

View File

@@ -146,7 +146,7 @@
 </div>
 <p>
 Note that all exceptions that you expect to be thrown must be caught and then rethrown as a
-{@link org.apache.hadoop.hbase.exceptions.ConstraintException}. This way, you can be sure that a
+{@link org.apache.hadoop.hbase.constraint.ConstraintException}. This way, you can be sure that a
 {@link org.apache.hadoop.hbase.client.Put} fails for an expected reason, rather than for any reason.
 For example, an {@link java.lang.OutOfMemoryError} is probably indicative of an inherent problem in
 the {@link org.apache.hadoop.hbase.constraint.Constraint}, rather than a failed {@link org.apache.hadoop.hbase.client.Put}.

View File

@@ -19,21 +19,19 @@
 package org.apache.hadoop.hbase.thrift;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.thrift.server.TThreadedSelectorServer;
 import org.apache.thrift.transport.TNonblockingServerTransport;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 /**
  * A TThreadedSelectorServer.Args that reads hadoop configuration
  */
 @InterfaceAudience.Private
 public class HThreadedSelectorServerArgs extends TThreadedSelectorServer.Args {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(TThreadedSelectorServer.class);
+  private static final Log LOG = LogFactory.getLog(TThreadedSelectorServer.class);
   /**
    * Number of selector threads for reading and writing socket

View File

@@ -65,8 +65,8 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 /**
  * Standup the master and fake it to test various aspects of master function.
@@ -78,7 +78,7 @@ import org.slf4j.LoggerFactory;
  */
 @Category(MediumTests.class)
 public class TestMasterNoCluster {
-  private static Logger LOG = LoggerFactory.getLogger(TestMasterNoCluster.class);
+  private static final Log LOG = LogFactory.getLog(TestMasterNoCluster.class);
   private static final HBaseTestingUtility TESTUTIL = new HBaseTestingUtility();
   @BeforeClass

pom.xml
View File

@@ -461,6 +461,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-release-plugin</artifactId>
+        <version>2.4.1</version>
         <!--Making a release I've been using mvn 3.0 and specifying the apache-release
             profile on the command line as follows:
@@ -480,6 +481,7 @@
             But it builds the test jar. From SUREFIRE-172.
           -->
           <arguments>-Dmaven.test.skip.exec</arguments>
+          <pomFileName>pom.xml</pomFileName>
         </configuration>
       </plugin>
       <plugin>
@@ -609,6 +611,9 @@
           <execution>
             <phase>prepare-package</phase>
             <goals>
+              <!--This goal will install a -test.jar when we do install
+                  See http://maven.apache.org/guides/mini/guide-attached-tests.html
+                  -->
               <goal>test-jar</goal>
             </goals>
           </execution>
@@ -884,7 +889,9 @@
     <commons-io.version>2.4</commons-io.version>
     <commons-lang.version>2.6</commons-lang.version>
     <commons-logging.version>1.1.1</commons-logging.version>
-    <commons-math.version>2.1</commons-math.version>
+    <commons-math.version>2.2</commons-math.version>
+    <collections.version>3.2.1</collections.version>
+    <httpclient.version>3.0.1</httpclient.version>
    <metrics-core.version>2.1.2</metrics-core.version>
     <guava.version>12.0.1</guava.version>
     <jackson.version>1.8.8</jackson.version>
@@ -896,13 +903,13 @@
     <jruby.version>1.6.8</jruby.version>
     <junit.version>4.11</junit.version>
     <htrace.version>1.50</htrace.version>
-    <slf4j.version>1.4.3</slf4j.version>
     <log4j.version>1.2.17</log4j.version>
     <mockito-all.version>1.9.0</mockito-all.version>
     <protobuf.version>2.4.1</protobuf.version>
     <stax-api.version>1.0.1</stax-api.version>
     <thrift.version>0.9.0</thrift.version>
     <zookeeper.version>3.4.5</zookeeper.version>
+    <slf4j.version>1.6.4</slf4j.version>
     <hadoop-snappy.version>0.0.1-SNAPSHOT</hadoop-snappy.version>
     <clover.version>2.6.3</clover.version>
     <jamon-runtime.version>2.3.1</jamon-runtime.version>
@@ -1045,6 +1052,18 @@
         <artifactId>jettison</artifactId>
         <version>${jettison.version}</version>
       </dependency>
+      <dependency>
+        <groupId>log4j</groupId>
+        <artifactId>log4j</artifactId>
+        <version>${log4j.version}</version>
+      </dependency>
+      <!--This is not used by hbase directly. Used by thrift,
+          yammer and zk.-->
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-api</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
       <dependency>
         <groupId>com.yammer.metrics</groupId>
         <artifactId>metrics-core</artifactId>
@@ -1055,6 +1074,16 @@
         <artifactId>guava</artifactId>
         <version>${guava.version}</version>
       </dependency>
+      <dependency>
+        <groupId>commons-collections</groupId>
+        <artifactId>commons-collections</artifactId>
+        <version>${collections.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>commons-httpclient</groupId>
+        <artifactId>commons-httpclient</artifactId>
+        <version>${httpclient.version}</version>
+      </dependency>
       <dependency>
         <groupId>commons-cli</groupId>
         <artifactId>commons-cli</artifactId>
@@ -1090,11 +1119,6 @@
         <artifactId>commons-math</artifactId>
         <version>${commons-math.version}</version>
       </dependency>
-      <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <version>${log4j.version}</version>
-      </dependency>
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
@@ -1203,16 +1227,6 @@
         <artifactId>jackson-xc</artifactId>
         <version>${jackson.version}</version>
       </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-api</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-log4j12</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
       <dependency>
         <!--If this is not in the runtime lib, we get odd
             "2009-02-27 11:38:39.504::WARN: failed jsp
@@ -1297,6 +1311,12 @@
         <groupId>junit</groupId>
         <artifactId>junit</artifactId>
         <version>${junit.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-core</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
@@ -1319,6 +1339,10 @@
       <version>${findbugs-annotations}</version>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+    </dependency>
     <!-- Test dependencies -->
     <dependency>
       <groupId>junit</groupId>
@@ -1438,7 +1462,8 @@
       <id>hadoop-1.1</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h1--><name>!hadoop.profile</name>
         </property>
       </activation>
       <modules>
@@ -1446,7 +1471,6 @@
       </modules>
       <properties>
         <hadoop.version>${hadoop-one.version}</hadoop.version>
-        <slf4j.version>1.4.3</slf4j.version>
         <compat.module>hbase-hadoop1-compat</compat.module>
         <assembly.file>src/main/assembly/hadoop-one-compat.xml</assembly.file>
       </properties>
@@ -1507,7 +1531,6 @@
         <hadoop.version>1.0.4</hadoop.version>
         <!-- Need to set this for the Hadoop 1 compat module -->
         <hadoop-one.version>${hadoop.version}</hadoop-one.version>
-        <slf4j.version>1.4.3</slf4j.version>
         <compat.module>hbase-hadoop1-compat</compat.module>
         <assembly.file>src/main/assembly/hadoop-one-compat.xml</assembly.file>
       </properties>
@@ -1558,8 +1581,8 @@
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2--><name>hadoop.profile</name><value>2.0</value>
         </property>
       </activation>
       <modules>
@@ -1567,12 +1590,43 @@
       </modules>
       <properties>
         <hadoop.version>${hadoop-two.version}</hadoop.version>
-        <slf4j.version>1.6.1</slf4j.version>
         <compat.module>hbase-hadoop2-compat</compat.module>
         <assembly.file>src/main/assembly/hadoop-two-compat.xml</assembly.file>
       </properties>
       <dependencyManagement>
         <dependencies>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <version>${hadoop-two.version}</version>
+          </dependency>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+            <version>${hadoop-two.version}</version>
+          </dependency>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+            <version>${hadoop-two.version}</version>
+            <type>test-jar</type>
+          </dependency>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hadoop-two.version}</version>
+          </dependency>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hadoop-two.version}</version>
+            <type>test-jar</type>
+          </dependency>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-auth</artifactId>
+            <version>${hadoop-two.version}</version>
+          </dependency>
          <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
@@ -1625,7 +1679,6 @@
         </property>
       </activation>
       <properties>
-        <slf4j.version>1.6.1</slf4j.version>
         <hadoop.version>3.0.0-SNAPSHOT</hadoop.version>
       </properties>
       <dependencies>