HBASE-6087 Add hbase-common module

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1344510 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-05-30 23:51:44 +00:00
parent 3d586b8722
commit 17a12aa8e5
14 changed files with 286 additions and 63 deletions

View File

@ -137,6 +137,16 @@ add_to_cp_if_exists() {
fi fi
} }
# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
add_to_cp_if_exists "${HBASE_HOME}"
fi
#add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
fi
add_maven_deps_to_classpath() { add_maven_deps_to_classpath() {
# Need to generate classpath from maven pom. This is costly so generate it # Need to generate classpath from maven pom. This is costly so generate it
# and cache it. Save the file into our target dir so a mvn clean will get # and cache it. Save the file into our target dir so a mvn clean will get
@ -168,23 +178,13 @@ add_maven_test_classes_to_classpath(){
done done
} }
# Add maven target directory #Add the development env class path stuff
if $in_dev_env; then if $in_dev_env; then
add_maven_deps_to_classpath add_maven_deps_to_classpath
add_maven_main_classes_to_classpath add_maven_main_classes_to_classpath
add_maven_test_classes_to_classpath add_maven_test_classes_to_classpath
fi fi
# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
add_to_cp_if_exists "${HBASE_HOME}"
fi
#add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
fi
#add the hbase jars for each module #add the hbase jars for each module
for f in $HBASE_HOME/hbase-jars/hbase*.jar; do for f in $HBASE_HOME/hbase-jars/hbase*.jar; do
if [[ $f = *sources.jar ]] if [[ $f = *sources.jar ]]

View File

@ -184,6 +184,10 @@
</build> </build>
<dependencies> <dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</dependency>
<dependency> <dependency>
<artifactId>hbase-server</artifactId> <artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>

View File

@ -161,6 +161,7 @@
<excludes> <excludes>
<!-- Add new modules here --> <!-- Add new modules here -->
<exclude>hbase-server-${project.version}.jar</exclude> <exclude>hbase-server-${project.version}.jar</exclude>
<exclude>hbase-common-${project.version}.jar</exclude>
<exclude>target/</exclude> <exclude>target/</exclude>
<exclude>test/</exclude> <exclude>test/</exclude>
<exclude>.classpath</exclude> <exclude>.classpath</exclude>

197
hbase-common/pom.xml Normal file
View File

@ -0,0 +1,197 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase</artifactId>
<groupId>org.apache.hbase</groupId>
<version>0.95-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>hbase-common</artifactId>
<name>HBase - Common</name>
<description>Common functionality for HBase</description>
<build>
<pluginManagement>
<plugins>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<!-- Always skip the second part executions, since we only run
simple unit tests in this module -->
<executions>
<execution>
<id>secondPartTestsExecution</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<skip>true</skip>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencies>
<!-- General dependencies -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
</dependencies>
<profiles>
<!-- profile against Hadoop 1.0.x: This is the default. It has to have the same
activation property as the parent Hadoop 1.0.x profile to make sure it gets run at
the same time. -->
<profile>
<id>hadoop-1.0</id>
<activation>
<property>
<name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-test</artifactId>
</dependency>
</dependencies>
</profile>
<!--
profile for building against Hadoop 2.0.0-alpha. Activate using:
mvn -Dhadoop.profile=2.0
-->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>2.0</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<!-- needed to run the unit test for DS to generate
the required classpath that is required in the env
of the launch container in the mini mr/yarn cluster
-->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!--
profile for building against Hadoop 3.0.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<properties>
<hadoop.version>3.0-SNAPSHOT</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<!-- needed to run the unit test for DS to generate
the required classpath that is required in the env
of the launch container in the mini mr/yarn cluster
-->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -1,6 +1,4 @@
/** /**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
@ -49,8 +47,8 @@ public class HBaseConfiguration extends Configuration {
//TODO:replace with private constructor, HBaseConfiguration should not extend Configuration //TODO:replace with private constructor, HBaseConfiguration should not extend Configuration
super(); super();
addHbaseResources(this); addHbaseResources(this);
LOG.warn("instantiating HBaseConfiguration() is deprecated. Please use" + LOG.warn("instantiating HBaseConfiguration() is deprecated. Please use"
" HBaseConfiguration#create() to construct a plain Configuration"); + " HBaseConfiguration#create() to construct a plain Configuration");
} }
/** /**
@ -83,7 +81,7 @@ public class HBaseConfiguration extends Configuration {
HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT); HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);
int bcul = (int)(blockCacheUpperLimit * CONVERT_TO_PERCENTAGE); int bcul = (int)(blockCacheUpperLimit * CONVERT_TO_PERCENTAGE);
if (CONVERT_TO_PERCENTAGE - (gml + bcul) if (CONVERT_TO_PERCENTAGE - (gml + bcul)
< (int)(CONVERT_TO_PERCENTAGE * < (int)(CONVERT_TO_PERCENTAGE *
HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD)) { HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD)) {
throw new RuntimeException( throw new RuntimeException(
"Current heap configuration for MemStore and BlockCache exceeds " + "Current heap configuration for MemStore and BlockCache exceeds " +
@ -91,7 +89,7 @@ public class HBaseConfiguration extends Configuration {
"The combined value cannot exceed 0.8. Please check " + "The combined value cannot exceed 0.8. Please check " +
"the settings for hbase.regionserver.global.memstore.upperLimit and " + "the settings for hbase.regionserver.global.memstore.upperLimit and " +
"hfile.block.cache.size in your configuration. " + "hfile.block.cache.size in your configuration. " +
"hbase.regionserver.global.memstore.upperLimit is " + "hbase.regionserver.global.memstore.upperLimit is " +
globalMemstoreLimit + globalMemstoreLimit +
" hfile.block.cache.size is " + blockCacheUpperLimit); " hfile.block.cache.size is " + blockCacheUpperLimit);
} }
@ -135,19 +133,17 @@ public class HBaseConfiguration extends Configuration {
destConf.set(e.getKey(), e.getValue()); destConf.set(e.getKey(), e.getValue());
} }
} }
/** /**
*
* @return whether to show HBase Configuration in servlet * @return whether to show HBase Configuration in servlet
*/ */
public static boolean isShowConfInServlet() { public static boolean isShowConfInServlet() {
boolean isShowConf = false; boolean isShowConf = false;
try { try {
if (Class.forName("org.apache.hadoop.conf.ConfServlet") != null) { if (Class.forName("org.apache.hadoop.conf.ConfServlet") != null) {
isShowConf = true; isShowConf = true;
} }
} catch (Exception e) { } catch (Exception e) {
} }
return isShowConf; return isShowConf;
} }

View File

@ -1,6 +1,4 @@
/** /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
@ -571,9 +569,6 @@ public final class HConstants {
/** HBCK special code name used as server name when manipulating ZK nodes */ /** HBCK special code name used as server name when manipulating ZK nodes */
public static final String HBCK_CODE_NAME = "HBCKServerName"; public static final String HBCK_CODE_NAME = "HBCKServerName";
public static final ServerName HBCK_CODE_SERVERNAME =
new ServerName(HBCK_CODE_NAME, -1, -1L);
public static final String KEY_FOR_HOSTNAME_SEEN_BY_MASTER = public static final String KEY_FOR_HOSTNAME_SEEN_BY_MASTER =
"hbase.regionserver.hostname.seen.by.master"; "hbase.regionserver.hostname.seen.by.master";

View File

@ -1,4 +1,4 @@
/* /**
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information

View File

@ -1,6 +1,4 @@
/** /**
* Copyright 2009 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information

View File

@ -1,6 +1,4 @@
/** /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information

View File

@ -1,6 +1,4 @@
/** /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
@ -26,7 +24,6 @@ import java.io.PrintWriter;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.VersionAnnotation; import org.apache.hadoop.hbase.VersionAnnotation;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
/** /**
@ -92,7 +89,7 @@ public class VersionInfo {
public static String getUrl() { public static String getUrl() {
return version != null ? version.url() : "Unknown"; return version != null ? version.url() : "Unknown";
} }
static String[] versionReport() { static String[] versionReport() {
return new String[] { return new String[] {
"HBase " + getVersion(), "HBase " + getVersion(),

View File

@ -291,11 +291,25 @@
</lifecycleMappingMetadata> </lifecycleMappingMetadata>
</configuration> </configuration>
</plugin> </plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>${surefire.version}</version>
<configuration>
<!-- Have to set the groups here because we only do
split tests in this package, so groups only live in this module -->
<groups>${surefire.firstPartGroups}</groups>
</configuration>
</plugin>
</plugins> </plugins>
</pluginManagement> </pluginManagement>
</build> </build>
<dependencies> <dependencies>
<!-- Intra-project dependencies -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</dependency>
<!-- General dependencies --> <!-- General dependencies -->
<dependency> <dependency>
<groupId>com.yammer.metrics</groupId> <groupId>com.yammer.metrics</groupId>
@ -496,8 +510,8 @@
</profile> </profile>
<!-- <!--
profile for building against Hadoop 0.23.0. Activate using: profile for building against Hadoop 2.0.0-alpha. Activate using:
mvn -Dhadoop.profile=23 mvn -Dhadoop.profile=2.0
--> -->
<profile> <profile>
<id>hadoop-2.0</id> <id>hadoop-2.0</id>

View File

@ -105,6 +105,9 @@ public class AssignmentManager extends ZooKeeperListener {
private static final Log LOG = LogFactory.getLog(AssignmentManager.class); private static final Log LOG = LogFactory.getLog(AssignmentManager.class);
public static final ServerName HBCK_CODE_SERVERNAME = new ServerName(HConstants.HBCK_CODE_NAME,
-1, -1L);
protected Server master; protected Server master;
private ServerManager serverManager; private ServerManager serverManager;
@ -709,7 +712,7 @@ public class AssignmentManager extends ZooKeeperListener {
return; return;
} }
// Check if this is a special HBCK transition // Check if this is a special HBCK transition
if (sn.equals(HConstants.HBCK_CODE_SERVERNAME)) { if (sn.equals(HBCK_CODE_SERVERNAME)) {
handleHBCK(rt); handleHBCK(rt);
return; return;
} }

View File

@ -69,18 +69,27 @@ git clone git://git.apache.org/hbase.git
<para>If you cloned the project via git, download and install the Git plugin (EGit). Attach to your local git repo (via the Git Repositories window) and you'll be able to see file revision history, generate patches, etc.</para> <para>If you cloned the project via git, download and install the Git plugin (EGit). Attach to your local git repo (via the Git Repositories window) and you'll be able to see file revision history, generate patches, etc.</para>
</section> </section>
<section xml:id="eclipse.maven.setup"> <section xml:id="eclipse.maven.setup">
<title>HBase Project Setup</title> <title>HBase Project Setup in Eclipse</title>
To set up your Eclipse environment for HBase, close Eclipse and execute... <para>The easiest way is to use the m2eclipse plugin for Eclipse. Eclipse Indigo or newer has m2eclipse built-in, or it can be found here: http://www.eclipse.org/m2e/. M2Eclipse provides Maven integration for Eclipse - it even lets you use the direct Maven commands from within Eclipse to compile and test your project.</para>
<programlisting> <para>To import the project, you merely need to go to File->Import...Maven->Existing Maven Projects and then point Eclipse at the HBase root directory; m2eclipse will automatically find all the hbase modules for you.</para>
mvn eclipse:eclipse <para>If you install m2eclipse and import HBase in your workspace, you will have to fix your eclipse Build Path.
</programlisting> Remove <filename>target</filename> folder, add <filename>target/generated-jamon</filename>
and <filename>target/generated-sources/java</filename> folders. You may also remove from your Build Path
the exclusions on the <filename>src/main/resources</filename> and <filename>src/test/resources</filename>
to avoid error message in the console 'Failed to execute goal org.apache.maven.plugins:maven-antrun-plugin:1.6:run (default) on project hbase:
'An Ant BuildException has occured: Replace: source file .../target/classes/hbase-default.xml doesn't exist'. This will also
reduce the eclipse build cycles and make your life easier when developing.</para>
</section>
<section xml:id="eclipse.commandline">
<title>Import into eclipse with the command line</title>
<para>For those not inclined to use m2eclipse, you can generate the Eclipse files from the command line. First, run (you should only have to do this once):
<programlisting>mvn clean install -DskipTests</programlisting>
and then close Eclipse and execute...
<programlisting>mvn eclipse:eclipse</programlisting>
... from your local HBase project directory in your workspace to generate some new <filename>.project</filename> ... from your local HBase project directory in your workspace to generate some new <filename>.project</filename>
and <filename>.classpath</filename> files. Then reopen Eclipse, and import the .project file in the HBase directory to a workspace. and <filename>.classpath</filename> files. Then reopen Eclipse, and import the .project file in the HBase directory to a workspace.
</para>
</section> </section>
<section xml:id="eclipse.maven.plugin">
<title>Maven Plugin</title>
<para>Download and install the Maven plugin. For example, Help -&gt; Install New Software -&gt; (search for Maven Plugin)</para>
</section>
<section xml:id="eclipse.maven.class"> <section xml:id="eclipse.maven.class">
<title>Maven Classpath Variable</title> <title>Maven Classpath Variable</title>
<para>The <varname>M2_REPO</varname> classpath variable needs to be set up for the project. This needs to be set to <para>The <varname>M2_REPO</varname> classpath variable needs to be set up for the project. This needs to be set to
@ -95,16 +104,6 @@ Unbound classpath variable: 'M2_REPO/com/google/guava/guava/r09/guava-r09.jar' i
Unbound classpath variable: 'M2_REPO/com/google/protobuf/protobuf-java/2.3.0/protobuf-java-2.3.0.jar' in project 'hbase' hbase Build path Build Path Problem Unbound classpath variable: Unbound classpath variable: 'M2_REPO/com/google/protobuf/protobuf-java/2.3.0/protobuf-java-2.3.0.jar' in project 'hbase' hbase Build path Build Path Problem Unbound classpath variable:
</programlisting> </programlisting>
</section> </section>
<section xml:id="eclipse.m2eclipse">
<title>Import via m2eclipse</title>
<para>If you install the m2eclipse and import the HBase pom.xml in your workspace, you will have to fix your eclipse Build Path.
Remove <filename>target</filename> folder, add <filename>target/generated-jamon</filename>
and <filename>target/generated-sources/java</filename> folders. You may also remove from your Build Path
the exclusions on the <filename>src/main/resources</filename> and <filename>src/test/resources</filename>
to avoid error message in the console 'Failed to execute goal org.apache.maven.plugins:maven-antrun-plugin:1.6:run (default) on project hbase:
'An Ant BuildException has occured: Replace: source file .../target/classes/hbase-default.xml doesn't exist'. This will also
reduce the eclipse build cycles and make your life easier when developing.</para>
</section>
<section xml:id="eclipse.issues"> <section xml:id="eclipse.issues">
<title>Eclipse Known Issues</title> <title>Eclipse Known Issues</title>
<para>Eclipse will currently complain about <filename>Bytes.java</filename>. It is not possible to turn these errors off.</para> <para>Eclipse will currently complain about <filename>Bytes.java</filename>. It is not possible to turn these errors off.</para>
@ -617,6 +616,21 @@ mvn compile
<para>This convention comes from our parent project Hadoop.</para> <para>This convention comes from our parent project Hadoop.</para>
</section> <!-- code.standards --> </section> <!-- code.standards -->
<section xml:id="run.insitu">
<title>Running In-Situ</title>
<para>If you are developing HBase, frequently it is useful to test your changes against a more-real cluster than what you find in unit tests. In this case, HBase can be run directly from the source in local-mode.
All you need to do is run:
</para>
<programlisting>${HBASE_HOME}/bin/start-hbase.sh</programlisting>
<para>
This will spin up a full local-cluster, just as if you had packaged up HBase and installed it on your machine.
</para>
<para>Keep in mind that you will need to have installed HBase into your local maven repository for the in-situ cluster to work properly. That is, you will need to run:</para>
<programlisting>mvn clean install -DskipTests</programlisting>
<para>to ensure that maven can find the correct classpath and dependencies. Generally, the above command
is just a good thing to try running first, if maven is acting oddly.</para>
</section> <!-- run.insitu -->
</section> <!-- developing --> </section> <!-- developing -->
<section xml:id="submitting.patches"> <section xml:id="submitting.patches">

18
pom.xml
View File

@ -56,6 +56,7 @@
<module>hbase-assembly</module> <module>hbase-assembly</module>
<module>hbase-server</module> <module>hbase-server</module>
<module>hbase-site</module> <module>hbase-site</module>
<module>hbase-common</module>
</modules> </modules>
<scm> <scm>
@ -423,15 +424,15 @@
<version>${surefire.version}</version> <version>${surefire.version}</version>
</dependency> </dependency>
</dependencies> </dependencies>
<!-- Generic testing configuration for all packages -->
<configuration> <configuration>
<skip>${surefire.skipFirstPart}</skip> <skip>${surefire.skipFirstPart}</skip>
<forkMode>${surefire.firstPartForkMode}</forkMode> <forkMode>${surefire.firstPartForkMode}</forkMode>
<parallel>${surefire.firstPartParallel}</parallel> <parallel>${surefire.firstPartParallel}</parallel>
<perCoreThreadCount>false</perCoreThreadCount> <perCoreThreadCount>false</perCoreThreadCount>
<threadCount>${surefire.firstPartThreadCount}</threadCount> <threadCount>${surefire.firstPartThreadCount}</threadCount>
<parallel>classes</parallel><!-- surefire hack, if not we're using method parallelisation <parallel>classes</parallel><!-- surefire hack, if not we're
class ! --> using method parallelization class ! -->
<groups>${surefire.firstPartGroups}</groups>
<testFailureIgnore>false</testFailureIgnore> <testFailureIgnore>false</testFailureIgnore>
<forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds> <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
<argLine>-enableassertions -Xmx1900m <argLine>-enableassertions -Xmx1900m
@ -730,6 +731,11 @@
--> -->
<!-- Intra-module dependencies --> <!-- Intra-module dependencies -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency> <dependency>
<artifactId>hbase-server</artifactId> <artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
@ -745,9 +751,9 @@
<!-- General dependencies --> <!-- General dependencies -->
<dependency> <dependency>
<groupId>com.yammer.metrics</groupId> <groupId>com.yammer.metrics</groupId>
<artifactId>metrics-core</artifactId> <artifactId>metrics-core</artifactId>
<version>${metrics-core.version}</version> <version>${metrics-core.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>