HBASE-3873 Mavenize Hadoop Snappy JAR/SOs project dependencies

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1132833 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2011-06-06 23:16:09 +00:00
parent 06eba7a579
commit 573c0a89bf
6 changed files with 149 additions and 12 deletions


@@ -241,6 +241,8 @@ Release 0.91.0 - Unreleased
HBASE-3592 Guava snuck back in as a dependency via hbase-3777
HBASE-3808 Implement Executor.toString for master handlers at least
(Brock Noland)
HBASE-3873 Mavenize Hadoop Snappy JAR/SOs project dependencies
(Alejandro Abdelnur)
TASKS
HBASE-3559 Move report of split to master OFF the heartbeat channel HBASE-3559 Move report of split to master OFF the heartbeat channel

pom.xml

@@ -416,10 +416,15 @@
<exclude>**/*$*</exclude>
<exclude>${test.exclude.pattern}</exclude>
</excludes>
<environmentVariables>
<LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/nativelib</LD_LIBRARY_PATH>
<DYLD_LIBRARY_PATH>${env.DYLD_LIBRARY_PATH}:${project.build.directory}/nativelib</DYLD_LIBRARY_PATH>
</environmentVariables>
</configuration>
</plugin>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.6</version>
<executions>
<execution>
<id>generate</id>
@@ -472,12 +477,50 @@
<tasks>
<replace file="${project.build.outputDirectory}/hbase-default.xml"
token="@@@VERSION@@@" value="${project.version}" />
<mkdir dir="${project.build.directory}/nativelib"/>
<exec executable="tar" dir="${project.build.directory}/nativelib" failonerror="false">
<arg value="xf"/>
<arg value="hadoop-snappy-nativelibs.tar"/>
</exec>
</tasks>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
<execution>
<id>package</id>
<phase>package</phase>
<configuration>
<target>
<!-- Complements the assembly -->
<mkdir dir="${project.build.directory}/${project.artifactId}-${project.version}/${project.artifactId}-${project.version}/lib/native/${build.platform}"/>
<!-- Using Unix cp to preserve symlinks, using script to handle wildcards -->
<echo file="${project.build.directory}/copynativelibs.sh">
if [ `ls ${project.build.directory}/nativelib | wc -l` -ne 0 ]; then
cp -PR ${project.build.directory}/nativelib/lib* ${project.build.directory}/${project.artifactId}-${project.version}/${project.artifactId}-${project.version}/lib/native/${build.platform}
fi
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./copynativelibs.sh"/>
</exec>
<!-- Using Unix tar to preserve symlinks -->
<exec executable="tar" failonerror="yes"
dir="${project.build.directory}/${project.artifactId}-${project.version}">
<arg value="czf"/>
<arg value="${project.build.directory}/${project.artifactId}-${project.version}.tar.gz"/>
<arg value="."/>
</exec>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
@@ -587,6 +630,7 @@
<stax-api.version>1.0.1</stax-api.version>
<thrift.version>0.6.1</thrift.version>
<zookeeper.version>3.3.3</zookeeper.version>
<hadoop-snappy.version>0.0.1-cdh3u1-SNAPSHOT</hadoop-snappy.version>
<package.prefix>/usr</package.prefix>
<package.conf.dir>/etc/hbase</package.conf.dir>
@@ -978,6 +1022,78 @@
</plugins>
</build>
</profile>
<profile>
<id>os.linux</id>
<activation>
<activeByDefault>false</activeByDefault>
<os>
<family>Linux</family>
</os>
</activation>
<properties>
<build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
</properties>
</profile>
<profile>
<id>os.mac</id>
<activation>
<os>
<family>Mac</family>
</os>
</activation>
<properties>
<build.platform>Mac_OS_X-${sun.arch.data.model}</build.platform>
</properties>
</profile>
<profile>
<id>hadoop-snappy</id>
<activation>
<activeByDefault>false</activeByDefault>
<property>
<name>snappy</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-snappy</artifactId>
<version>${hadoop-snappy.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>get-hadoop-snappy-native</id>
<phase>generate-resources</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-snappy</artifactId>
<version>${hadoop-snappy.version}</version>
<classifier>${build.platform}</classifier>
<type>tar</type>
<overWrite>false</overWrite>
<outputDirectory>${project.build.directory}/nativelib</outputDirectory>
<destFileName>hadoop-snappy-nativelibs.tar</destFileName>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<!-- See http://jira.codehaus.org/browse/MSITE-443 why the settings need to be here and not in pluginManagement. -->
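Note (not part of the commit): the os.linux and os.mac profiles above derive ${build.platform} from JVM system properties, and the hadoop-snappy profile (activated by passing -Dsnappy to Maven) copies the platform-classified hadoop-snappy tarball into target/nativelib, where the antrun "generate" execution untars it and surefire exposes it to tests through LD_LIBRARY_PATH/DYLD_LIBRARY_PATH. The sketch below reproduces the platform string and library lookup from plain Java; the class name, the relative nativelib path, and the Mac branch are illustrative assumptions, not code from this commit.

import java.io.File;

/** Sketch only: mirror Maven's ${build.platform} and look for the snappy native lib. */
public class NativeLibCheck {
  public static void main(String[] args) {
    // os.linux profile: ${os.name}-${os.arch}-${sun.arch.data.model}, e.g. "Linux-amd64-64".
    // os.mac profile hardcodes the prefix because os.name is "Mac OS X" (contains spaces).
    String osName = System.getProperty("os.name");
    String dataModel = System.getProperty("sun.arch.data.model");
    String platform = osName.startsWith("Mac")
        ? "Mac_OS_X-" + dataModel
        : osName + "-" + System.getProperty("os.arch") + "-" + dataModel;
    System.out.println("build.platform = " + platform);

    // After "mvn test -Dsnappy", the untarred natives should sit here (path is an assumption).
    File dir = new File("target/nativelib");
    File lib = new File(dir, System.mapLibraryName("snappy"));
    System.out.println(lib + (lib.exists() ? " found" : " not found"));
  }
}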


@@ -7,7 +7,7 @@
-->
<id>all</id>
<formats>
- <format>tar.gz</format>
+ <format>dir</format>
</formats>
<fileSets>
<fileSet>


@@ -1408,23 +1408,19 @@ false
SNAPPY
</title>
<para>
- To set SNAPPY compression on a column family, do as following:
+ If snappy is installed, HBase can make use of it (courtesy of
+ <link xlink:href="http://code.google.com/p/hadoop-snappy/">hadoop-snappy</link>).
<orderedlist>
<listitem>
<para>
- Install hadoop-snappy using these instructions: http://code.google.com/p/hadoop-snappy/
+ Build and install <link xlink:href="http://code.google.com/p/snappy/">snappy</link> on all nodes
+ of your cluster.
</para>
</listitem>
<listitem>
<para>
- You need to ensure the hadoop-snappy libs (incl. the native libs) are in the HBase classpath. One way to do this is
- to just symlink the libs from <filename>HADOOP_HOME/lib</filename> to <filename>HBASE_HOME/lib</filename>.
- This needs to be done on all HBase nodes, as with LZO.
- </para>
- </listitem>
- <listitem>
- <para>
- Use CompressionTest to verify snappy support is enabled and the libs can be loaded:
+ Use CompressionTest to verify snappy support is enabled and the libs can be loaded ON ALL NODES of your cluster:
<programlisting>$ hbase org.apache.hadoop.hbase.util.CompressionTest hdfs://host/path/to/hbase snappy</programlisting>
</para>
</listitem>
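Note (not part of the diff): with snappy support installed and verified as above, compression is enabled per column family. A minimal sketch against the client API of this era, assuming a reachable cluster and default configuration; the table name "t1" and family "cf" are made-up examples. The HBase shell equivalent would be along the lines of create 't1', {NAME => 'cf', COMPRESSION => 'SNAPPY'}.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.hfile.Compression;

/** Sketch only: create a table whose family writes snappy-compressed HFiles. */
public class SnappyTableExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);

    HTableDescriptor table = new HTableDescriptor("t1");      // hypothetical table name
    HColumnDescriptor family = new HColumnDescriptor("cf");   // hypothetical family name
    // Compression is applied at flush/compaction time; it fails there if the
    // native snappy libs cannot be loaded on the region servers.
    family.setCompressionType(Compression.Algorithm.SNAPPY);
    table.addFamily(family);
    admin.createTable(table);
  }
}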


@@ -10,6 +10,12 @@
<title>Building HBase</title>
<para>This chapter will be of interest only to those building HBase from source.
</para>
<section xml:id="build.snappy">
<title>Building in snappy compression support</title>
<para>Pass <code>-Dsnappy</code> to trigger the snappy maven profile for building
snappy native libs into hbase.</para>
</section>
<section xml:id="mvn_repo">
<title>Adding an HBase release to Apache's Maven Repository</title>
<para>Follow the instructions at


@@ -54,6 +54,23 @@ public class TestCompressionTest {
assertFalse(CompressionTest.testCompression("LZO"));
assertTrue(CompressionTest.testCompression("NONE"));
assertTrue(CompressionTest.testCompression("GZ"));
- assertFalse(CompressionTest.testCompression("SNAPPY"));
+ if (isCompressionAvailable("org.apache.hadoop.io.compress.SnappyCodec")) {
+ assertTrue(CompressionTest.testCompression("SNAPPY"));
+ }
+ else {
+ assertFalse(CompressionTest.testCompression("SNAPPY"));
+ }
}
+ private boolean isCompressionAvailable(String codecClassName) {
+ try {
+ Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
+ return true;
+ }
+ catch (Exception ex) {
+ return false;
+ }
+ }
}
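Note (not part of the diff): isCompressionAvailable() only proves the SnappyCodec class is on the classpath; the native library must also load before data can actually be (de)compressed, which is what CompressionTest exercises. A rough standalone round-trip through the Hadoop codec API, assuming the hadoop-snappy jar and its native libs are resolvable at runtime; the class below is illustrative, while the codec and stream calls are standard Hadoop.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.ReflectionUtils;

/** Sketch only: prove the snappy natives load by round-tripping a buffer through SnappyCodec. */
public class SnappyRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Load reflectively so this compiles even when hadoop-snappy is absent.
    Class<?> codecClass = Class.forName("org.apache.hadoop.io.compress.SnappyCodec");
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);

    byte[] original = "snappy round trip".getBytes("UTF-8");

    // Compress: throws if the native snappy library cannot be loaded.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    OutputStream out = codec.createOutputStream(compressed);
    out.write(original);
    out.close();

    // Decompress and compare.
    InputStream in = codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    IOUtils.copyBytes(in, restored, 4096, true);   // closes both streams
    System.out.println("round trip ok: " + Arrays.equals(original, restored.toByteArray()));
  }
}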