HBASE-19552 Update hbase-thirdparty version

Some manual cleanup: changing package names in pom files and getting
rid of the no-longer-needed netty system property.

This commit will break compilation; package renames in source code are
done in follow-on commits using straightforward find and replace:

's/org.apache.hadoop.hbase.shaded.com.google/org.apache.hbase.thirdparty.com.google/'
's/org.apache.hadoop.hbase.shaded.io.netty/org.apache.hbase.thirdparty.io.netty/'
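For illustration, a hedged sketch of how the follow-on renames might be applied across the Java sources (the actual commands used in the follow-on commits are not recorded here; the paths and sed flags below are assumptions):

# Apply both relocation renames in place; GNU sed shown, use `sed -i ''` on BSD/macOS.
find . -name '*.java' -print0 | xargs -0 sed -i \
  -e 's/org\.apache\.hadoop\.hbase\.shaded\.com\.google/org.apache.hbase.thirdparty.com.google/g' \
  -e 's/org\.apache\.hadoop\.hbase\.shaded\.io\.netty/org.apache.hbase.thirdparty.io.netty/g'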
Mike Drob 2017-12-27 10:58:47 -06:00
parent 78013a0c1e
commit ea7d5fc884
8 changed files with 16 additions and 78 deletions
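A hedged way to reproduce the summary above from an HBase checkout (the commit hash is the one listed):

# Show the per-file additions/deletions for this commit.
git show --stat ea7d5fc884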


@@ -480,16 +480,9 @@ fi
 HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
 # Exec unless HBASE_NOEXEC is set.
 export CLASSPATH
-# Netty is shaded in hbase. The shaded netty is pulled in with org.apache.hbase.thirdparty:hbase-shaded-netty.
-# It has a .so in it. Shading requires rename of the .so and then passing a system property so netty finds the
-# renamed .so and associates it w/ the relocated netty files.
-# So we can find the relocated .so, we need to add a system property pointing at new location. Trick is from:
-# https://stackoverflow.com/questions/33825743/rename-files-inside-a-jar-using-some-maven-plugin
-# See toward the end of this issue for how to pass config: https://github.com/netty/netty/issues/6665
-#
-NETTY_PROP="-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded."
 if [ "${HBASE_NOEXEC}" != "" ]; then
-"$JAVA" -Dproc_$COMMAND "$NETTY_PROP" -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
+"$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
 else
-exec "$JAVA" -Dproc_$COMMAND "$NETTY_PROP" -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
+exec "$JAVA" -Dproc_$COMMAND -XX:OnOutOfMemoryError="kill -9 %p" $HEAP_SETTINGS $HBASE_OPTS $CLASS "$@"
 fi
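The comment block removed above describes why the property existed: the shaded netty jar bundles a renamed native library (.so) that has to be matched back to the relocated classes. A hedged way to peek at those bundled natives, assuming the hbase-shaded-netty jar sits in the default local Maven repository (the path and version below are illustrative):

# List the native libraries carried inside the shaded netty jar.
unzip -l "$HOME/.m2/repository/org/apache/hbase/thirdparty/hbase-shaded-netty/2.0.0/hbase-shaded-netty-2.0.0.jar" | grep 'META-INF/native/'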


@@ -67,7 +67,7 @@
 http://checkstyle.sourceforge.net/config_imports.html -->
 <module name="AvoidStarImport"/>
 <module name="ImportOrder">
-<property name="groups" value="*,org.apache.hadoop.hbase.shaded"/>
+<property name="groups" value="*,org.apache.hbase.thirdparty,org.apache.hadoop.hbase.shaded"/>
 <property name="option" value="top" />
 <property name="ordered" value="true"/>
 <property name="sortStaticImportsAlphabetically" value="true"/>


@@ -76,9 +76,6 @@
 <value>org.apache.hadoop.hbase.ServerResourceCheckerJUnitListener</value>
 </property>
 </properties>
-<systemPropertyVariables>
-<org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>org.apache.hadoop.hbase.shaded.</org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>
-</systemPropertyVariables>
 </configuration>
 </plugin>
 <!-- Make a jar and put the sources in the jar -->


@@ -120,7 +120,7 @@
 <replacements>
 <replacement>
 <token>([^\.])com.google.protobuf</token>
-<value>$1org.apache.hadoop.hbase.shaded.com.google.protobuf</value>
+<value>$1org.apache.hbase.thirdparty.com.google.protobuf</value>
 </replacement>
 <replacement>
 <token>(public)(\W+static)?(\W+final)?(\W+class)</token>
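The replacement above rewrites generated protobuf sources. A hedged sed equivalent, only to illustrate what the `([^\.])` guard and `$1` back-reference do (the input line is made up):

# The leading capture group refuses a preceding '.', so already-relocated references are not rewritten again.
echo 'import com.google.protobuf.ByteString;' | sed -E 's/([^.])com\.google\.protobuf/\1org.apache.hbase.thirdparty.com.google.protobuf/'
# prints: import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;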


@@ -245,7 +245,6 @@
 </properties>
 <systemPropertyVariables>
 <test.build.webapps>target/test-classes/webapps</test.build.webapps>
-<org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>org.apache.hadoop.hbase.shaded.</org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>
 </systemPropertyVariables>
 </configuration>
 </plugin>


@@ -618,9 +618,6 @@
 <junitxml>.</junitxml>
 <filereports>WDF TestSuite.txt</filereports>
 <parallel>false</parallel>
-<systemProperties>
-<org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>org.apache.hadoop.hbase.shaded.</org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>
-</systemProperties>
 </configuration>
 <executions>
 <execution>
@@ -630,11 +627,7 @@
 <goal>test</goal>
 </goals>
 <configuration>
-<systemProperties>
-<org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>org.apache.hadoop.hbase.shaded.</org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>
-</systemProperties>
-<argLine> -Xmx1536m -XX:ReservedCodeCacheSize=512m
-</argLine>
+<argLine>-Xmx1536m -XX:ReservedCodeCacheSize=512m</argLine>
 <parallel>false</parallel>
 </configuration>
 </execution>

pom.xml (24 changed lines)

@@ -667,25 +667,13 @@
 <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
 <systemPropertyVariables>
 <test.build.classes>${test.build.classes}</test.build.classes>
-<!--For shaded netty, to find the relocated .so.
-Trick from
-https://stackoverflow.com/questions/33825743/rename-files-inside-a-jar-using-some-maven-plugin
-The netty jar has a .so in it. Shading requires rename of the .so and then passing a system
-property so netty finds the renamed .so and associates it w/ the relocated netty files.
-The relocated netty is in hbase-thirdparty dependency. Just set this propery globally rather
-than per module.
--->
-<org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>org.apache.hadoop.hbase.shaded.</org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>
 </systemPropertyVariables>
 <excludes>
 <!-- users can add -D option to skip particular test classes
 ex: mvn test -Dtest.exclude.pattern=**/TestFoo.java,**/TestBar.java
 -->
 <exclude>${test.exclude.pattern}</exclude>
 </excludes>
 </configuration>
 <executions>
 <execution>
@@ -1502,7 +1490,7 @@
 <spotbugs.version>3.1.0-RC3</spotbugs.version>
 <wagon.ssh.version>2.12</wagon.ssh.version>
 <xml.maven.version>1.0.1</xml.maven.version>
-<hbase-thirdparty.version>1.0.1</hbase-thirdparty.version>
+<hbase-thirdparty.version>2.0.0</hbase-thirdparty.version>
 <!-- General Packaging -->
 <package.prefix>/usr</package.prefix>
 <package.conf.dir>/etc/hbase</package.conf.dir>
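After the version bump above, a hedged way to confirm which hbase-thirdparty artifacts a module resolves (standard maven-dependency-plugin usage; output varies per module):

# Show only org.apache.hbase.thirdparty entries in the dependency tree.
mvn dependency:tree -Dincludes=org.apache.hbase.thirdparty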


@@ -2186,57 +2186,25 @@ libraries such as guava, netty, and protobuf. The mainline HBase project
 relies on the relocated versions of these libraries gotten from hbase-thirdparty
 rather than on finding these classes in their usual locations. We do this so
 we can specify whatever the version we wish. If we don't relocate, we must
-harmonize our version to match that which hadoop and/or spark uses.
+harmonize our version to match that which hadoop, spark, and other projects use.
 
 For developers, this means you need to be careful referring to classes from
 netty, guava, protobuf, gson, etc. (see the hbase-thirdparty pom.xml for what
 it provides). Devs must refer to the hbase-thirdparty provided classes. In
 practice, this is usually not an issue (though it can be a bit of a pain). You
 will have to hunt for the relocated version of your particular class. You'll
-find it by prepending the general relocation prefix of `org.apache.hadoop.hbase.shaded.`.
+find it by prepending the general relocation prefix of `org.apache.hbase.thirdparty.`.
 For example if you are looking for `com.google.protobuf.Message`, the relocated
 version used by HBase internals can be found at
-`org.apache.hadoop.hbase.shaded.com.google.protobuf.Message`.
+`org.apache.hbase.thirdparty.com.google.protobuf.Message`.
 
 For a few thirdparty libs, like protobuf (see the protobuf chapter in this book
 for the why), your IDE may give you both options -- the `com.google.protobuf.*`
-and the `org.apache.hadoop.hbase.shaded.com.google.protobuf.*` -- because both
+and the `org.apache.hbase.thirdparty.com.google.protobuf.*` -- because both
 classes are on your CLASSPATH. Unless you are doing the particular juggling
 required in Coprocessor Endpoint development (again see above cited protobuf
 chapter), you'll want to use the shaded version, always.
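As a quick check against the paragraph above, a hedged sketch for finding which jar on an unpacked HBase install actually provides the relocated protobuf `Message` class (assumes `bin/hbase` is runnable from the install directory):

# Scan the runtime classpath for the relocated Message class.
bin/hbase classpath | tr ':' '\n' | grep '\.jar$' | while read -r jar; do
  unzip -l "$jar" 2>/dev/null | grep -q 'org/apache/hbase/thirdparty/com/google/protobuf/Message\.class' && echo "$jar"
done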
-Of note, the relocation of netty is particular. The netty folks have put in
-place facility to aid relocation; it seems like shading netty is a popular project.
-One case of this requires the setting of a peculiar system property on the JVM
-so that classes out in the bundld shared library (.so) can be found in their
-relocated location. Here is the property that needs to be set:
-`-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.`
-(Note that the trailing '.' is required). Starting hbase normally or when running
-test suites, the setting of this property is done for you. If you are doing something
-out of the ordinary, starting hbase from your own context, you'll need to provide
-this property on platforms that favor the bundled .so. See release notes on HBASE-18271
-for more. The complaint you see is something like the following:
-`Cause: java.lang.RuntimeException: Failed construction of Master: class org.apache.hadoop.hbase.master.HMasterorg.apache.hadoop.hbase.shaded.io.netty.channel.epoll.`
-If running unit tests and you run into the above message, add the system property
-to your surefire configuration by doing like the below:
-[source,xml]
-----
-<plugin>
-<artifactId>maven-surefire-plugin</artifactId>
-<configuration>
-<systemPropertyVariables>
-<org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>org.apache.hadoop.hbase.shaded.</org.apache.hadoop.hbase.shaded.io.netty.packagePrefix>
-</systemPropertyVariables>
-</configuration>
-</plugin>
-----
-Again the trailing period in the value above is intended.
 
 The `hbase-thirdparty` project has groupid of `org.apache.hbase.thirdparty`.
 As of this writing, it provides three jars; one for netty with an artifactid of
 `hbase-thirdparty-netty`, one for protobuf at `hbase-thirdparty-protobuf` and then