HBASE-6087 Add hbase-common module
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1344510 13f79535-47bb-0310-9956-ffa450edef68
parent 3d586b8722
commit 17a12aa8e5
bin/hbase (22 changed lines)

@@ -137,6 +137,16 @@ add_to_cp_if_exists() {
  fi
}

# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}"
fi
#add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
fi

add_maven_deps_to_classpath() {
  # Need to generate classpath from maven pom. This is costly so generate it
  # and cache it. Save the file into our target dir so a mvn clean will get

@@ -168,23 +178,13 @@ add_maven_test_classes_to_classpath(){
  done
}

# Add maven target directory
#Add the development env class path stuff
if $in_dev_env; then
  add_maven_deps_to_classpath
  add_maven_main_classes_to_classpath
  add_maven_test_classes_to_classpath
fi

# For releases, add hbase & webapps to CLASSPATH
# Webapps must come first else it messes up Jetty
if [ -d "$HBASE_HOME/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}"
fi
#add if we are in a dev environment
if [ -d "$HBASE_HOME/hbase-server/target/hbase-webapps" ]; then
  add_to_cp_if_exists "${HBASE_HOME}/hbase-server/target"
fi

#add the hbase jars for each module
for f in $HBASE_HOME/hbase-jars/hbase*.jar; do
  if [[ $f = *sources.jar ]]
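The add_maven_deps_to_classpath function is cut off in the hunk above, but its comment spells out the idea: building the classpath from the Maven pom is costly, so it is generated once and cached under target/ so that a mvn clean removes it. The lines below are only a hedged sketch of that approach, not the function body from the patch; the cache file name cached_classpath.txt is a placeholder, while dependency:build-classpath and its mdep.outputFile property are standard maven-dependency-plugin options (the same goal this commit configures in the new pom profiles).

# Sketch only: generate the Maven dependency classpath once and reuse it on later runs.
cpfile="${HBASE_HOME}/hbase-server/target/cached_classpath.txt"   # placeholder cache location under target/
if [ ! -f "${cpfile}" ]; then
  # build-classpath writes the colon-separated dependency classpath into the named file
  mvn -f "${HBASE_HOME}/pom.xml" dependency:build-classpath -Dmdep.outputFile="${cpfile}" &> /dev/null
fi
CLASSPATH="${CLASSPATH}:$(cat "${cpfile}")"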
@@ -184,6 +184,10 @@
  </build>

  <dependencies>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-common</artifactId>
    </dependency>
    <dependency>
      <artifactId>hbase-server</artifactId>
      <groupId>org.apache.hbase</groupId>

@@ -161,6 +161,7 @@
    <excludes>
      <!-- Add new modules here -->
      <exclude>hbase-server-${project.version}.jar</exclude>
      <exclude>hbase-common-${project.version}.jar</exclude>
      <exclude>target/</exclude>
      <exclude>test/</exclude>
      <exclude>.classpath</exclude>
@@ -0,0 +1,197 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <!--
  /**
   * Licensed to the Apache Software Foundation (ASF) under one
   * or more contributor license agreements. See the NOTICE file
   * distributed with this work for additional information
   * regarding copyright ownership. The ASF licenses this file
   * to you under the Apache License, Version 2.0 (the
   * "License"); you may not use this file except in compliance
   * with the License. You may obtain a copy of the License at
   *
   *     http://www.apache.org/licenses/LICENSE-2.0
   *
   * Unless required by applicable law or agreed to in writing, software
   * distributed under the License is distributed on an "AS IS" BASIS,
   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
  -->
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <artifactId>hbase</artifactId>
    <groupId>org.apache.hbase</groupId>
    <version>0.95-SNAPSHOT</version>
    <relativePath>..</relativePath>
  </parent>

  <artifactId>hbase-common</artifactId>
  <name>HBase - Common</name>
  <description>Common functionality for HBase</description>

  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <!-- Always skip the second part executions, since we only run
            simple unit tests in this module -->
          <executions>
            <execution>
              <id>secondPartTestsExecution</id>
              <phase>test</phase>
              <goals>
                <goal>test</goal>
              </goals>
              <configuration>
                <skip>true</skip>
              </configuration>
            </execution>
          </executions>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>

  <dependencies>
    <!-- General dependencies -->
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
    </dependency>
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
    </dependency>
  </dependencies>

  <profiles>
    <!-- profile against Hadoop 1.0.x: This is the default. It has to have the same
      activation property as the parent Hadoop 1.0.x profile to make sure it gets run at
      the same time. -->
    <profile>
      <id>hadoop-1.0</id>
      <activation>
        <property>
          <name>!hadoop.profile</name>
        </property>
      </activation>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-core</artifactId>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-test</artifactId>
        </dependency>
      </dependencies>
    </profile>

    <!--
      profile for building against Hadoop 2.0.0-alpha. Activate using:
        mvn -Dhadoop.profile=2.0
    -->
    <profile>
      <id>hadoop-2.0</id>
      <activation>
        <property>
          <name>hadoop.profile</name>
          <value>2.0</value>
        </property>
      </activation>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-client</artifactId>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-annotations</artifactId>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-minicluster</artifactId>
        </dependency>
      </dependencies>
      <build>
        <plugins>
          <plugin>
            <artifactId>maven-dependency-plugin</artifactId>
            <executions>
              <execution>
                <id>create-mrapp-generated-classpath</id>
                <phase>generate-test-resources</phase>
                <goals>
                  <goal>build-classpath</goal>
                </goals>
                <configuration>
                  <!-- needed to run the unit test for DS to generate
                    the required classpath that is required in the env
                    of the launch container in the mini mr/yarn cluster
                  -->
                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>

    <!--
      profile for building against Hadoop 3.0.x. Activate using:
        mvn -Dhadoop.profile=3.0
    -->
    <profile>
      <id>hadoop-3.0</id>
      <activation>
        <property>
          <name>hadoop.profile</name>
          <value>3.0</value>
        </property>
      </activation>
      <properties>
        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
      </properties>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-annotations</artifactId>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-minicluster</artifactId>
        </dependency>
      </dependencies>
      <build>
        <plugins>
          <plugin>
            <artifactId>maven-dependency-plugin</artifactId>
            <executions>
              <execution>
                <id>create-mrapp-generated-classpath</id>
                <phase>generate-test-resources</phase>
                <goals>
                  <goal>build-classpath</goal>
                </goals>
                <configuration>
                  <!-- needed to run the unit test for DS to generate
                    the required classpath that is required in the env
                    of the launch container in the mini mr/yarn cluster
                  -->
                  <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>
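With hbase-common added to the top-level reactor (see the pom.xml hunk at the end of this commit), the module can be built alone or together with its dependents. The commands below are an illustrative sketch using standard Maven flags; the hadoop.profile value is the one documented in the profile comments of this pom.

# Build and install only hbase-common plus the reactor modules it needs
mvn -pl hbase-common -am clean install -DskipTests

# Build the whole tree against the Hadoop 2.0 profile declared above
mvn clean install -DskipTests -Dhadoop.profile=2.0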
@@ -1,6 +1,4 @@
/**
 * Copyright 2007 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -49,8 +47,8 @@ public class HBaseConfiguration extends Configuration {
    //TODO:replace with private constructor, HBaseConfiguration should not extend Configuration
    super();
    addHbaseResources(this);
    LOG.warn("instantiating HBaseConfiguration() is deprecated. Please use" +
      " HBaseConfiguration#create() to construct a plain Configuration");
    LOG.warn("instantiating HBaseConfiguration() is deprecated. Please use"
        + " HBaseConfiguration#create() to construct a plain Configuration");
  }

  /**

@@ -83,7 +81,7 @@ public class HBaseConfiguration extends Configuration {
      HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);
    int bcul = (int)(blockCacheUpperLimit * CONVERT_TO_PERCENTAGE);
    if (CONVERT_TO_PERCENTAGE - (gml + bcul)
        < (int)(CONVERT_TO_PERCENTAGE *
            HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD)) {
      throw new RuntimeException(
        "Current heap configuration for MemStore and BlockCache exceeds " +

@@ -91,7 +89,7 @@ public class HBaseConfiguration extends Configuration {
        "The combined value cannot exceed 0.8. Please check " +
        "the settings for hbase.regionserver.global.memstore.upperLimit and " +
        "hfile.block.cache.size in your configuration. " +
        "hbase.regionserver.global.memstore.upperLimit is " +
        globalMemstoreLimit +
        " hfile.block.cache.size is " + blockCacheUpperLimit);
    }

@@ -135,19 +133,17 @@ public class HBaseConfiguration extends Configuration {
      destConf.set(e.getKey(), e.getValue());
    }
  }

  /**
   *
   * @return whether to show HBase Configuration in servlet
   */
  public static boolean isShowConfInServlet() {
    boolean isShowConf = false;
    try {
      if (Class.forName("org.apache.hadoop.conf.ConfServlet") != null) {
        isShowConf = true;
      }
    } catch (Exception e) {

    }
    return isShowConf;
  }
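The heap check above enforces that the MemStore upper limit and the block cache together stay at or below 0.8 of the heap, the remainder being reserved for everything else. As a rough worked example (not taken from the patch): with hbase.regionserver.global.memstore.upperLimit = 0.4 and hfile.block.cache.size = 0.5, gml + bcul = 40 + 50 = 90 on the percentage scale, which is over the 80 allowed by the 0.8 ceiling, so the constructor throws the RuntimeException quoted above; 0.4 and 0.35 (75 in total) would pass.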
@@ -1,6 +1,4 @@
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -571,9 +569,6 @@ public final class HConstants {
  /** HBCK special code name used as server name when manipulating ZK nodes */
  public static final String HBCK_CODE_NAME = "HBCKServerName";

  public static final ServerName HBCK_CODE_SERVERNAME =
    new ServerName(HBCK_CODE_NAME, -1, -1L);

  public static final String KEY_FOR_HOSTNAME_SEEN_BY_MASTER =
      "hbase.regionserver.hostname.seen.by.master";
@@ -1,4 +1,4 @@
/*
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -1,6 +1,4 @@
/**
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -1,6 +1,4 @@
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -1,6 +1,4 @@
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -26,7 +24,6 @@ import java.io.PrintWriter;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.VersionAnnotation;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.commons.logging.Log;

/**

@@ -92,7 +89,7 @@ public class VersionInfo {
  public static String getUrl() {
    return version != null ? version.url() : "Unknown";
  }

  static String[] versionReport() {
    return new String[] {
      "HBase " + getVersion(),
@@ -291,11 +291,25 @@
          </lifecycleMappingMetadata>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>${surefire.version}</version>
        <configuration>
          <!-- Have to set the groups here because we only do
            split tests in this package, so groups on live in this module -->
          <groups>${surefire.firstPartGroups}</groups>
        </configuration>
      </plugin>
    </plugins>
  </pluginManagement>
</build>

<dependencies>
  <!-- Intra-project dependencies -->
  <dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-common</artifactId>
  </dependency>
  <!-- General dependencies -->
  <dependency>
    <groupId>com.yammer.metrics</groupId>

@@ -496,8 +510,8 @@
  </profile>

  <!--
    profile for building against Hadoop 0.23.0. Activate using:
      mvn -Dhadoop.profile=23
    profile for building against Hadoop 2.0.0-alpha. Activate using:
      mvn -Dhadoop.profile=2.0
  -->
  <profile>
    <id>hadoop-2.0</id>
@@ -105,6 +105,9 @@ public class AssignmentManager extends ZooKeeperListener {

  private static final Log LOG = LogFactory.getLog(AssignmentManager.class);

  public static final ServerName HBCK_CODE_SERVERNAME = new ServerName(HConstants.HBCK_CODE_NAME,
      -1, -1L);

  protected Server master;

  private ServerManager serverManager;

@@ -709,7 +712,7 @@ public class AssignmentManager extends ZooKeeperListener {
      return;
    }
    // Check if this is a special HBCK transition
    if (sn.equals(HConstants.HBCK_CODE_SERVERNAME)) {
    if (sn.equals(HBCK_CODE_SERVERNAME)) {
      handleHBCK(rt);
      return;
    }
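For context, HBCK_CODE_SERVERNAME is the placeholder server name the hbck tool uses when it manipulates region-transition znodes, which is why AssignmentManager special-cases it above; this commit simply moves the constant from HConstants into AssignmentManager. The tool itself is launched through the same bin/hbase script changed earlier in this commit; a typical (illustrative) invocation:

# Run the HBase consistency checker; its ZK transitions are tagged with the HBCK code server name
bin/hbase hbck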
@@ -69,18 +69,27 @@ git clone git://git.apache.org/hbase.git
      <para>If you cloned the project via git, download and install the Git plugin (EGit). Attach to your local git repo (via the Git Repositories window) and you'll be able to see file revision history, generate patches, etc.</para>
    </section>
    <section xml:id="eclipse.maven.setup">
      <title>HBase Project Setup</title>
      To set up your Eclipse environment for HBase, close Eclipse and execute...
      <programlisting>
mvn eclipse:eclipse
      </programlisting>
      <title>HBase Project Setup in Eclipse</title>
      <para>The easiest way is to use the m2eclipse plugin for Eclipse. Eclipse Indigo or newer has m2eclipse built-in, or it can be found here:http://www.eclipse.org/m2e/. M2Eclipse provides Maven integration for Eclipse - it even lets you use the direct Maven commands from within Eclipse to compile and test your project.</para>
      <para>To import the project, you merely need to go to File->Import...Maven->Existing Maven Projects and then point Eclipse at the HBase root directory; m2eclipse will automatically find all the hbase modules for you.</para>
      <para>If you install m2eclipse and import HBase in your workspace, you will have to fix your eclipse Build Path.
        Remove <filename>target</filename> folder, add <filename>target/generated-jamon</filename>
        and <filename>target/generated-sources/java</filename> folders. You may also remove from your Build Path
        the exclusions on the <filename>src/main/resources</filename> and <filename>src/test/resources</filename>
        to avoid error message in the console 'Failed to execute goal org.apache.maven.plugins:maven-antrun-plugin:1.6:run (default) on project hbase:
        'An Ant BuildException has occured: Replace: source file .../target/classes/hbase-default.xml doesn't exist'. This will also
        reduce the eclipse build cycles and make your life easier when developing.</para>
    </section>
    <section xml:id="eclipse.commandline">
      <title>Import into eclipse with the command line</title>
      <para>For those not inclined to use m2eclipse, you can generate the Eclipse files from the command line. First, run (you should only have to do this once):
        <programlisting>mvn clean install -DskipTests</programlisting>
        and then close Eclipse and execute...
        <programlisting>mvn eclipse:eclipse</programlisting>
        ... from your local HBase project directory in your workspace to generate some new <filename>.project</filename>
        and <filename>.classpath</filename>files. Then reopen Eclipse, and import the .project file in the HBase directory to a workspace.
      </para>
    </section>
    <section xml:id="eclipse.maven.plugin">
      <title>Maven Plugin</title>
      <para>Download and install the Maven plugin. For example, Help -> Install New Software -> (search for Maven Plugin)</para>
    </section>
    <section xml:id="eclipse.maven.class">
      <title>Maven Classpath Variable</title>
      <para>The <varname>M2_REPO</varname> classpath variable needs to be set up for the project. This needs to be set to

@@ -95,16 +104,6 @@ Unbound classpath variable: 'M2_REPO/com/google/guava/guava/r09/guava-r09.jar' i
Unbound classpath variable: 'M2_REPO/com/google/protobuf/protobuf-java/2.3.0/protobuf-java-2.3.0.jar' in project 'hbase' hbase Build path Build Path Problem Unbound classpath variable:
      </programlisting>
    </section>
    <section xml:id="eclipse.m2eclipse">
      <title>Import via m2eclipse</title>
      <para>If you install the m2eclipse and import the HBase pom.xml in your workspace, you will have to fix your eclipse Build Path.
        Remove <filename>target</filename> folder, add <filename>target/generated-jamon</filename>
        and <filename>target/generated-sources/java</filename> folders. You may also remove from your Build Path
        the exclusions on the <filename>src/main/resources</filename> and <filename>src/test/resources</filename>
        to avoid error message in the console 'Failed to execute goal org.apache.maven.plugins:maven-antrun-plugin:1.6:run (default) on project hbase:
        'An Ant BuildException has occured: Replace: source file .../target/classes/hbase-default.xml doesn't exist'. This will also
        reduce the eclipse build cycles and make your life easier when developing.</para>
    </section>
    <section xml:id="eclipse.issues">
      <title>Eclipse Known Issues</title>
      <para>Eclipse will currently complain about <filename>Bytes.java</filename>. It is not possible to turn these errors off.</para>
@@ -617,6 +616,21 @@ mvn compile
      <para>This convention comes from our parent project Hadoop.</para>
    </section> <!-- code.standards -->

    <section xml:id="run.insitu">
      <title>Running In-Situ</title>
      <para>If you are developing HBase, frequently it is useful to test your changes against a more-real cluster than what you find in unit tests. In this case, HBase can be run directly from the source in local-mode.
        All you need to do is run:
      </para>
      <programlisting>${HBASE_HOME}/bin/start-hbase.sh</programlisting>
      <para>
        This will spin up a full local-cluster, just as if you had packaged up HBase and installed it on your machine.
      </para>
      <para>Keep in mind that you will need to have installed HBase into your local maven repository for the in-situ cluster to work properly. That is, you will need to run:</para>
      <programlisting>mvn clean install -DskipTests</programlisting>
      <para>to ensure that maven can find the correct classpath and dependencies. Generally, the above command
        is just a good thing to try running first, if maven is acting oddly.</para>
    </section> <!-- run.insitu -->

  </section> <!-- developing -->

  <section xml:id="submitting.patches">
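Putting the steps of the new section together, an end-to-end in-situ session might look like the sketch below, run from the source checkout. bin/hbase shell and bin/stop-hbase.sh are the standard companions of the start script mentioned in the text; they are shown here as an assumption about typical usage, not as part of the patch.

# One-time: install the modules into the local Maven repository
mvn clean install -DskipTests
# Bring up the local cluster straight from the source tree
${HBASE_HOME}/bin/start-hbase.sh
# Poke at it from the shell, then shut the cluster down
${HBASE_HOME}/bin/hbase shell
${HBASE_HOME}/bin/stop-hbase.sh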
pom.xml (18 changed lines)

@@ -56,6 +56,7 @@
    <module>hbase-assembly</module>
    <module>hbase-server</module>
    <module>hbase-site</module>
    <module>hbase-common</module>
  </modules>

  <scm>

@@ -423,15 +424,15 @@
            <version>${surefire.version}</version>
          </dependency>
        </dependencies>
        <!-- Generic testing configuration for all packages -->
        <configuration>
          <skip>${surefire.skipFirstPart}</skip>
          <forkMode>${surefire.firstPartForkMode}</forkMode>
          <parallel>${surefire.firstPartParallel}</parallel>
          <perCoreThreadCount>false</perCoreThreadCount>
          <threadCount>${surefire.firstPartThreadCount}</threadCount>
          <parallel>classes</parallel><!-- surefire hack, if not we're using method parallelisation class ! -->
          <groups>${surefire.firstPartGroups}</groups>
          <parallel>classes</parallel><!-- surefire hack, if not we're using method parallelization class ! -->
          <testFailureIgnore>false</testFailureIgnore>
          <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
          <argLine>-enableassertions -Xmx1900m

@@ -730,6 +731,11 @@
      -->

      <!-- Intra-module dependencies -->
      <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-common</artifactId>
        <version>${project.version}</version>
      </dependency>
      <dependency>
        <artifactId>hbase-server</artifactId>
        <groupId>org.apache.hbase</groupId>

@@ -745,9 +751,9 @@

      <!-- General dependencies -->
      <dependency>
        <groupId>com.yammer.metrics</groupId>
        <artifactId>metrics-core</artifactId>
        <version>${metrics-core.version}</version>
      </dependency>
      <dependency>
        <groupId>com.google.guava</groupId>