HBASE-20332 shaded mapreduce module shouldn't include hadoop

* modify the jar checking script to take args; make the hadoop checks optional
* separate out checking the artifacts that have hadoop vs those that don't.
  * Unfortunately this means we need two modules for checking things
  * put in a safety check that the support script for checking jar contents is maintained in both modules
  * have to carve out an exception for o.a.hadoop.metrics2. :(
* fix duplicated class warning
* clean up dependencies in hbase-server and some modules that depend on it.
* allow Hadoop to have its own htrace where it needs it
* add a precommit check to make sure we're not using old htrace imports
Sean Busbey 2018-04-09 13:37:44 -05:00
parent ac5bb8155b
commit f1b536bad4
25 changed files with 791 additions and 267 deletions
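
As a rough sketch, the reworked checker script supports two modes; the jar paths here are hypothetical, and the real invocations are wired up through exec-maven-plugin in the invariants modules below:

    # artifacts that must NOT bundle Hadoop (hbase-shaded-check-invariants)
    bash ensure-jars-have-correct-contents.sh /path/to/hbase-shaded-mapreduce.jar

    # artifacts that are expected to bundle Hadoop (hbase-shaded-with-hadoop-check-invariants)
    bash ensure-jars-have-correct-contents.sh --allow-hadoop /path/to/hbase-shaded-client.jar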

View File

@ -154,10 +154,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
@ -264,9 +260,6 @@
<value>3.0</value>
</property>
</activation>
<properties>
<hadoop.version>3.0-SNAPSHOT</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
@ -276,6 +269,11 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop.version}</version>
</dependency>
</dependencies>
</profile>
</profiles>

View File

@ -2,6 +2,13 @@
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.0//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_0.dtd">
<!--
TODO Update to use the message suppression filter once we can update
to checkstyle 8.6+
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
-->
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
@ -39,4 +46,8 @@
<suppress checks="EmptyBlockCheck" files="TBoundedThreadPoolServer.java"/>
<suppress checks="EqualsHashCode" files="StartcodeAgnosticServerName.java"/>
<suppress checks="MethodLength" files="Branch1CoprocessorMethods.java"/>
<!--
TODO use message filter once we can upgrade to checkstyle 8.6+
<suppress checks="IllegalImport" message="org\.apache\.htrace\.core"/>
-->
</suppressions>

View File

@ -77,6 +77,12 @@
<property name="processJavadoc" value="true"/>
</module>
<module name="IllegalImport">
<!--
TODO include the o.a.htrace package for HTrace v3 once we can upgrade
to checkstyle 8.6 plus and use a message filter to suppress
errors from proper use of o.a.htrace.core
org.apache.htrace,
-->
<property name="illegalPkgs" value="
com.google.common,
io.netty,
@ -87,7 +93,8 @@
org.apache.curator.shaded,
org.apache.hadoop.classification,
org.apache.htrace.shaded,
org.codehaus.jackson"/>
org.codehaus.jackson,
org.htrace"/>
<property name="illegalClasses" value="
org.apache.commons.logging.Log,
org.apache.commons.logging.LogFactory"/>
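
A rough shell approximation of what the new IllegalImport entries catch; this is only a sketch for illustration (checkstyle does the real enforcement, and the package names come from the illegalPkgs list above):

    # flag lingering HTrace v3-era imports in the source tree
    grep -rnE '^import (org\.htrace|org\.apache\.htrace\.shaded)\.' --include='*.java' src/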

View File

@ -234,10 +234,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>

View File

@ -314,12 +314,6 @@
<artifactId>hadoop-common</artifactId>
<!--FYI This pulls in hadoop's guava. It's needed for Configuration
at least-->
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
@ -366,12 +360,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>

View File

@ -260,12 +260,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@ -296,10 +290,6 @@
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@ -343,10 +333,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

View File

@ -232,12 +232,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
@ -287,12 +281,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>

View File

@ -224,10 +224,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

View File

@ -167,12 +167,6 @@ limitations under the License.
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>

View File

@ -335,12 +335,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -196,6 +196,15 @@
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<exclusions>
<!-- commons-logging is only used by hbase-http's HttpRequestLog and hbase-server's
HBaseTestingUtil. We don't need either of those here, so exclude it.
-->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
@ -246,10 +255,19 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<!-- jackson(s) used by PerformanceEvaluation and, it looks like, TableMapReduceUtil -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
</dependencies>
<profiles>
<!-- Skip the tests in this module -->
@ -284,10 +302,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
@ -334,10 +348,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
@ -377,10 +387,6 @@
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>

View File

@ -155,10 +155,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>

View File

@ -299,6 +299,19 @@
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
</dependency>
<dependency>
<!-- We *might* need this for XMLStreamReader use in RemoteAdmin
TODO figure out if we can remove it.
-->
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
<exclusions>
<exclusion>
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<!--For JspC used in ant task-->
<groupId>org.glassfish.web</groupId>
@ -374,12 +387,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -198,10 +198,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>

View File

@ -71,6 +71,14 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-remote-resources-plugin</artifactId>
<version>1.5</version>
<dependencies>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-resource-bundle</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>default</id>
@ -392,12 +400,6 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics</artifactId>
</dependency>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-resource-bundle</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
@ -437,19 +439,16 @@
<artifactId>jetty-webapp</artifactId>
</dependency>
<dependency>
<!--For JspC used in ant task-->
<!-- For JspC used in the ant task, then needed at compile/runtime
because the source code generated from the JSP refers to its runtime
-->
<groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId>
</dependency>
<!-- Also used by generated sources from our JSP -->
<dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
<exclusions>
<exclusion>
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
</exclusions>
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
</dependency>
<!-- General dependencies -->
<dependency>
@ -501,9 +500,20 @@
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
</dependency>
<!-- Jackson is only used in compile/runtime scope by the BlockCacheUtil class;
it is also used by some tests
-->
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
<!-- tracing Dependencies -->
@ -511,11 +521,6 @@
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core4</artifactId>
</dependency>
<dependency>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
<version>${htrace-hadoop.version}</version>
</dependency>
<dependency>
<groupId>com.lmax</groupId>
<artifactId>disruptor</artifactId>
@ -556,6 +561,15 @@
<artifactId>httpcore</artifactId>
<scope>test</scope>
</dependency>
<!-- commons-logging is used by HBTU to monkey with log levels.
We have to put it at compile scope because Hadoop's IOUtils uses it
in both hadoop 2.7 and 3.0, so we'd fail at compile if it were at test scope.
-->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-crypto</artifactId>
@ -674,34 +688,10 @@
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
@ -794,11 +784,6 @@
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop-three.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
@ -809,8 +794,24 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop-three.version}</version>
<artifactId>hadoop-hdfs-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -26,7 +26,7 @@
Enforces our invariants for our shaded artifacts. e.g. shaded clients have
a specific set of transitive dependencies and shaded clients only contain
classes that are in particular packages. Does the enforcement through
the maven-enforcer-plugin and and integration test.
the maven-enforcer-plugin and integration test.
</description>
<name>Apache HBase Shaded Packaging Invariants</name>
@ -34,11 +34,15 @@
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Include here any client facing artifacts that presume
the runtime environment will have hadoop.
If our checks fail for the shaded mapreduce artifact,
then probably a dependency from hadoop has shown up
in the hbase-mapreduce module without being flagged
as 'provided' scope. See the note by the relevant
hadoop profile in that module.
-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-mapreduce</artifactId>
@ -113,6 +117,8 @@
<exclude>com.github.stephenc.findbugs:*</exclude>
<!-- We leave HTrace as an unshaded dependency on purpose so that tracing within a JVM will work -->
<exclude>org.apache.htrace:*</exclude>
<!-- Our public API requires Hadoop at runtime to work -->
<exclude>org.apache.hadoop:*</exclude>
</excludes>
</banTransitiveDependencies>
<banDuplicateClasses>
@ -158,18 +164,37 @@
</execution>
</executions>
</plugin>
<!--
Check that we actually relocated everything we included.
It's critical that we don't ship third party dependencies that haven't
been relocated under our package space, since this will lead to
difficult to debug classpath errors for downstream. Unfortunately, that
means inspecting all the jars.
-->
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.6.0</version>
<executions>
<!-- It's easier to have two copies of our validation
script than to copy it via remote-resources-plugin, but
we need to make sure they stay the same.
-->
<execution>
<id>make-sure-validation-files-are-in-sync</id>
<phase>validate</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>diff</executable>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>../hbase-shaded-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
<argument>../hbase-shaded-with-hadoop-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
</arguments>
</configuration>
</execution>
<!--
Check that we actually relocated everything we included.
It's critical that we don't ship third party dependencies that haven't
been relocated under our package space, since this will lead to
difficult to debug classpath errors for downstream. Unfortunately, that
means inspecting all the jars.
-->
<execution>
<id>check-jar-contents</id>
<phase>integration-test</phase>
@ -180,6 +205,9 @@
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.testOutputDirectory}</workingDirectory>
<requiresOnline>false</requiresOnline>
<!-- Important that we don't pass the 'allow-hadoop' flag here, because
we allowed it as a provided dependency above.
-->
<arguments>
<argument>ensure-jars-have-correct-contents.sh</argument>
<argument>${hbase-client-artifacts}</argument>
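
The make-sure-validation-files-are-in-sync execution above boils down to a plain diff between the two copies of the checker script; diff exits non-zero as soon as the copies drift, failing the build during validate:

    diff ../hbase-shaded-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh \
         ../hbase-shaded-with-hadoop-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh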

View File

@ -15,33 +15,67 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# Usage: $0 [/path/to/some/example.jar:/path/to/another/example/created.jar]
#
# accepts a single command line argument with a colon separated list of
# paths to jars to check. Iterates through each such passed jar and checks
# all the contained paths to make sure they follow the below constructed
# safe list.
set -e
function usage {
echo "Usage: ${0} [options] [/path/to/some/example.jar:/path/to/another/example/created.jar]"
echo ""
echo " accepts a single command line argument with a colon separated list of"
echo " paths to jars to check. Iterates through each such passed jar and checks"
echo " all the contained paths to make sure they follow the below constructed"
echo " safe list."
echo ""
echo " --allow-hadoop Include stuff from the Apache Hadoop project in the list"
echo " of allowed jar contents. default: false"
echo " --debug print more info to stderr"
exit 1
}
# if no args specified, show usage
if [ $# -lt 1 ]; then
usage
fi
# we have to allow the directories that lead to the org/apache/hadoop dir
allowed_expr="(^org/$|^org/apache/$"
# Get arguments
declare allow_hadoop
declare debug
while [ $# -gt 0 ]
do
case "$1" in
--allow-hadoop) shift; allow_hadoop="true";;
--debug) shift; debug="true";;
--) shift; break;;
-*) usage ;;
*) break;; # terminate while loop
esac
done
# should still have jars to check.
if [ $# -lt 1 ]; then
usage
fi
if [ -n "${debug}" ]; then
echo "[DEBUG] Checking on jars: $*" >&2
echo "jar command is: $(which jar)" >&2
echo "grep command is: $(which grep)" >&2
grep -V >&2 || true
fi
IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "$1")
# we have to allow the directories that lead to the hbase dirs
allowed_expr="(^org/$|^org/apache/$|^org/apache/hadoop/$"
# We allow the following things to exist in our client artifacts:
# * classes in packages that start with org.apache.hadoop, which by
# convention should be in a path that looks like org/apache/hadoop
allowed_expr+="|^org/apache/hadoop/"
# * classes in packages that start with org.apache.hadoop.hbase, which by
# convention should be in a path that looks like org/apache/hadoop/hbase
allowed_expr+="|^org/apache/hadoop/hbase"
# * classes in packages that start with org.apache.hbase
allowed_expr+="|^org/apache/hbase/"
# * whatever in the "META-INF" directory
allowed_expr+="|^META-INF/"
# * the folding tables from jcodings
allowed_expr+="|^tables/"
# * Hadoop's and HBase's default configuration files, which have the form
# * HBase's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^[^-]*-default.xml$"
# * Hadoop's versioning properties files, which have the form
# "_module_-version-info.properties"
allowed_expr+="|^[^-]*-version-info.properties$"
# * Hadoop's application classloader properties file.
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
allowed_expr+="|^hbase-default.xml$"
# public suffix list used by httpcomponents
allowed_expr+="|^mozilla/$"
allowed_expr+="|^mozilla/public-suffix-list.txt$"
@ -51,12 +85,30 @@ allowed_expr+="|^properties.dtd$"
allowed_expr+="|^PropertyList-1.0.dtd$"
if [ -n "${allow_hadoop}" ]; then
# * classes in packages that start with org.apache.hadoop, which by
# convention should be in a path that looks like org/apache/hadoop
allowed_expr+="|^org/apache/hadoop/"
# * Hadoop's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^[^-]*-default.xml$"
# * Hadoop's versioning properties files, which have the form
# "_module_-version-info.properties"
allowed_expr+="|^[^-]*-version-info.properties$"
# * Hadoop's application classloader properties file.
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
else
# We have some classes for integrating with the Hadoop Metrics2 system
# that have to be in a particular package space due to access rules.
allowed_expr+="|^org/apache/hadoop/metrics2"
fi
allowed_expr+=")"
declare -i bad_artifacts=0
declare -a bad_contents
IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "$1")
for artifact in "${artifact_list[@]}"; do
bad_contents=($(jar tf "${artifact}" | grep -v -E "${allowed_expr}"))
bad_contents=($(jar tf "${artifact}" | grep -v -E "${allowed_expr}" || true))
if [ ${#bad_contents[@]} -gt 0 ]; then
echo "[ERROR] Found artifact with unexpected contents: '${artifact}'"
echo " Please check the following and either correct the build or update"
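
To make the allow-list mechanics concrete, here is a minimal standalone sketch of the same grep -v -E filtering, using a trimmed-down expression and hypothetical jar entries:

    allowed_expr="(^org/$|^org/apache/$|^org/apache/hadoop/$|^org/apache/hadoop/hbase|^META-INF/)"
    printf '%s\n' org/apache/hadoop/hbase/Foo.class com/thirdparty/Bar.class META-INF/MANIFEST.MF |
      grep -v -E "${allowed_expr}" || true
    # only com/thirdparty/Bar.class survives the filter; that is exactly what the
    # script reports as "unexpected contents"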

View File

@ -62,6 +62,10 @@
</plugins>
</build>
<dependencies>
<!--
We want to ensure needed hadoop bits are at provided scope for our shaded
artifact, so we list them below in hadoop specific profiles.
-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-mapreduce</artifactId>
@ -137,10 +141,6 @@
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
@ -149,6 +149,17 @@
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
</exclusion>
<!-- We excluded the server-side generated classes for JSP, so exclude
their runtime support libraries too
-->
<exclusion>
<groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
@ -158,12 +169,175 @@
<id>release</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
</plugin>
<!-- Tell the shade plugin we want to leave Hadoop as a dependency -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<id>aggregate-into-a-jar-with-relocated-third-parties</id>
<configuration>
<artifactSet>
<excludes>
<exclude>org.apache.hadoop:*</exclude>
<!-- The rest of these should be kept in sync with the parent pom -->
<exclude>org.apache.hbase:hbase-resource-bundle</exclude>
<exclude>org.slf4j:*</exclude>
<exclude>com.google.code.findbugs:*</exclude>
<exclude>com.github.stephenc.findbugs:*</exclude>
<exclude>org.apache.htrace:*</exclude>
<exclude>org.apache.yetus:*</exclude>
<exclude>log4j:*</exclude>
<exclude>commons-logging:*</exclude>
</excludes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- These hadoop profiles should be derived from those in the hbase-mapreduce
module. Essentially, you must list the same hadoop-* dependencies
since provided dependencies are not transitively included.
-->
<!-- profile against Hadoop 2.x: This is the default. -->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h2--><name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-compiler</artifactId>
</exclusion>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-runtime</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</exclusion>
<exclusion>
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
<exclusion>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
</exclusions>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
<!--
profile for building against Hadoop 3.0.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<properties>
<hadoop.version>${hadoop-three.version}</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
</profiles>
</project>
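
Per the note at the top of the profiles, these must mirror the hadoop profiles in the hbase-mapreduce module. A sketch of exercising the non-default profile (the module path is an assumption):

    # build the shaded mapreduce jar against Hadoop 3, keeping hadoop-* at provided scope
    mvn clean package -Dhadoop.profile=3.0 -pl hbase-shaded/hbase-shaded-mapreduce -am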

View File

@ -0,0 +1,215 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase</artifactId>
<groupId>org.apache.hbase</groupId>
<version>3.0.0-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
<artifactId>hbase-shaded-with-hadoop-check-invariants</artifactId>
<packaging>pom</packaging>
<description>
Enforces our invariants for our shaded artifacts. e.g. shaded clients have
a specific set of transitive dependencies and shaded clients only contain
classes that are in particular packages. Does the enforcement through
the maven-enforcer-plugin and integration test.
</description>
<name>Apache HBase Shaded Packaging Invariants (with Hadoop bundled)</name>
<properties>
</properties>
<dependencies>
<!-- This should only be client facing artifacts that bundle
Apache Hadoop related artifacts.
-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- parent pom defines these for children. :( :( :( -->
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>provided</scope>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.0-beta-6</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>enforce-banned-dependencies</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<skip>true</skip>
<rules>
<banTransitiveDependencies>
<!--
<message>
Our client-facing artifacts are not supposed to have additional dependencies
and one or more of them do. The output from the enforcer plugin should give
specifics.
</message>
-->
<excludes>
<!-- We leave logging stuff alone -->
<exclude>org.slf4j:*</exclude>
<exclude>log4j:*</exclude>
<exclude>commons-logging:*</exclude>
<!-- annotations that never change -->
<exclude>com.google.code.findbugs:*</exclude>
<exclude>com.github.stephenc.findbugs:*</exclude>
<!-- We leave HTrace as an unshaded dependency on purpose so that tracing within a JVM will work -->
<exclude>org.apache.htrace:*</exclude>
<!-- NB we don't exclude Hadoop from this check here, because the assumption is any needed classes
are contained in our artifacts.
-->
</excludes>
</banTransitiveDependencies>
<banDuplicateClasses>
<findAllDuplicates>true</findAllDuplicates>
</banDuplicateClasses>
</rules>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>test-resources</id>
<phase>pre-integration-test</phase>
<goals>
<goal>testResources</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<!-- create a maven pom property that has all of our dependencies.
below in the integration-test phase we'll pass this list
of paths to our jar checker script.
-->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>put-client-artifacts-in-a-property</id>
<phase>pre-integration-test</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<excludeScope>provided</excludeScope>
<excludeTransitive>true</excludeTransitive>
<outputProperty>hbase-client-artifacts</outputProperty>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.6.0</version>
<executions>
<!-- It's easier to have two copies of our validation
script than to copy it via remote-resources-plugin, but
we need to make sure they stay the same.
-->
<execution>
<id>make-sure-validation-files-are-in-sync</id>
<phase>validate</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>diff</executable>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>../hbase-shaded-with-hadoop-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
<argument>../hbase-shaded-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
</arguments>
</configuration>
</execution>
<!--
Check that we actually relocated everything we included.
It's critical that we don't ship third party dependencies that haven't
been relocated under our package space, since this will lead to
difficult to debug classpath errors for downstream. Unfortunately, that
means inspecting all the jars.
-->
<execution>
<id>check-jar-contents-for-stuff-with-hadoop</id>
<phase>integration-test</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.testOutputDirectory}</workingDirectory>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>ensure-jars-have-correct-contents.sh</argument>
<argument>--allow-hadoop</argument>
<argument>${hbase-client-artifacts}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,129 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
function usage {
echo "Usage: ${0} [options] [/path/to/some/example.jar:/path/to/another/example/created.jar]"
echo ""
echo " accepts a single command line argument with a colon separated list of"
echo " paths to jars to check. Iterates through each such passed jar and checks"
echo " all the contained paths to make sure they follow the below constructed"
echo " safe list."
echo ""
echo " --allow-hadoop Include stuff from the Apache Hadoop project in the list"
echo " of allowed jar contents. default: false"
echo " --debug print more info to stderr"
exit 1
}
# if no args specified, show usage
if [ $# -lt 1 ]; then
usage
fi
# Get arguments
declare allow_hadoop
declare debug
while [ $# -gt 0 ]
do
case "$1" in
--allow-hadoop) shift; allow_hadoop="true";;
--debug) shift; debug="true";;
--) shift; break;;
-*) usage ;;
*) break;; # terminate while loop
esac
done
# should still have jars to check.
if [ $# -lt 1 ]; then
usage
fi
if [ -n "${debug}" ]; then
echo "[DEBUG] Checking on jars: $*" >&2
echo "jar command is: $(which jar)" >&2
echo "grep command is: $(which grep)" >&2
grep -V >&2 || true
fi
IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "$1")
# we have to allow the directories that lead to the hbase dirs
allowed_expr="(^org/$|^org/apache/$|^org/apache/hadoop/$"
# We allow the following things to exist in our client artifacts:
# * classes in packages that start with org.apache.hadoop.hbase, which by
# convention should be in a path that looks like org/apache/hadoop/hbase
allowed_expr+="|^org/apache/hadoop/hbase"
# * classes in packages that start with org.apache.hbase
allowed_expr+="|^org/apache/hbase/"
# * whatever in the "META-INF" directory
allowed_expr+="|^META-INF/"
# * the folding tables from jcodings
allowed_expr+="|^tables/"
# * HBase's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^hbase-default.xml$"
# public suffix list used by httpcomponents
allowed_expr+="|^mozilla/$"
allowed_expr+="|^mozilla/public-suffix-list.txt$"
# Comes from commons-configuration, not sure if relocatable.
allowed_expr+="|^digesterRules.xml$"
allowed_expr+="|^properties.dtd$"
allowed_expr+="|^PropertyList-1.0.dtd$"
if [ -n "${allow_hadoop}" ]; then
# * classes in packages that start with org.apache.hadoop, which by
# convention should be in a path that looks like org/apache/hadoop
allowed_expr+="|^org/apache/hadoop/"
# * Hadoop's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^[^-]*-default.xml$"
# * Hadoop's versioning properties files, which have the form
# "_module_-version-info.properties"
allowed_expr+="|^[^-]*-version-info.properties$"
# * Hadoop's application classloader properties file.
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
else
# We have some classes for integrating with the Hadoop Metrics2 system
# that have to be in a particular package space due to access rules.
allowed_expr+="|^org/apache/hadoop/metrics2"
fi
allowed_expr+=")"
declare -i bad_artifacts=0
declare -a bad_contents
for artifact in "${artifact_list[@]}"; do
bad_contents=($(jar tf "${artifact}" | grep -v -E "${allowed_expr}" || true))
if [ ${#bad_contents[@]} -gt 0 ]; then
echo "[ERROR] Found artifact with unexpected contents: '${artifact}'"
echo " Please check the following and either correct the build or update"
echo " the allowed list with reasoning."
echo ""
for bad_line in "${bad_contents[@]}"; do
echo " ${bad_line}"
done
bad_artifacts=${bad_artifacts}+1
else
echo "[INFO] Artifact looks correct: '$(basename "${artifact}")'"
fi
done
# if there was at least one bad artifact, exit with failure
if [ "${bad_artifacts}" -gt 0 ]; then
exit 1
fi
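
One scripting detail worth calling out: the jar list argument is colon-separated, and the IFS read above splits it into an array without word-splitting surprises. A standalone sketch with hypothetical paths:

    IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "/tmp/a.jar:/tmp/b.jar")
    printf '%s\n' "${artifact_list[@]}"
    # prints /tmp/a.jar and /tmp/b.jar on separate lines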

View File

@ -42,6 +42,7 @@
<module>hbase-shaded-client</module>
<module>hbase-shaded-mapreduce</module>
<module>hbase-shaded-check-invariants</module>
<module>hbase-shaded-with-hadoop-check-invariants</module>
</modules>
<dependencies>
<dependency>
@ -118,6 +119,7 @@
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<id>aggregate-into-a-jar-with-relocated-third-parties</id>
<phase>package</phase>
<goals>
<goal>shade</goal>
@ -447,12 +449,23 @@
<exclude>META-INF/ECLIPSEF.RSA</exclude>
</excludes>
</filter>
<filter>
<!-- Duplication of classes that ship in commons-collections 2.x and 3.x
If we stop bundling a relevant commons-collections artifact we'll
need to revisit. See: https://s.apache.org/e09o
-->
<artifact>commons-beanutils:commons-beanutils-core</artifact>
<excludes>
<exclude>org/apache/commons/collections/*.class</exclude>
</excludes>
</filter>
<filter>
<!-- server side webapps that we don't need -->
<artifact>org.apache.hbase:hbase-server</artifact>
<excludes>
<exclude>hbase-webapps/*</exclude>
<exclude>hbase-webapps/**/*</exclude>
<exclude>**/*_jsp.class</exclude>
</excludes>
</filter>
<filter>

View File

@ -277,12 +277,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@ -341,10 +335,6 @@
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@ -408,10 +398,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

View File

@ -140,12 +140,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@ -201,10 +195,6 @@
<artifactId>hadoop-minicluster</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@ -242,12 +232,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>

View File

@ -503,22 +503,12 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@ -576,12 +566,6 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>

pom.xml
View File

@ -1448,8 +1448,11 @@
<hadoop.guava.version>11.0.2</hadoop.guava.version>
<compat.module>hbase-hadoop2-compat</compat.module>
<assembly.file>src/main/assembly/hadoop-two-compat.xml</assembly.file>
<audience-annotations.version>0.5.0</audience-annotations.version>
<!--This property is for hadoop's netty. HBase netty
comes in via hbase-thirdparty hbase-shaded-netty-->
<netty.hadoop.version>3.6.2.Final</netty.hadoop.version>
<!-- end HBASE-15925 default hadoop compatibility values -->
<audience-annotations.version>0.5.0</audience-annotations.version>
<avro.version>1.7.7</avro.version>
<commons-codec.version>1.10</commons-codec.version>
<!-- pretty outdated -->
@ -1477,7 +1480,6 @@
<junit.version>4.12</junit.version>
<hamcrest.version>1.3</hamcrest.version>
<htrace.version>4.2.0-incubating</htrace.version>
<htrace-hadoop.version>3.2.0-incubating</htrace-hadoop.version>
<log4j.version>1.2.17</log4j.version>
<mockito-core.version>2.1.0</mockito-core.version>
<!--Internally we use a different version of protobuf. See hbase-protocol-shaded-->
@ -1605,7 +1607,8 @@
org.mortbay.jetty:servlet-api, javax.servlet:servlet-api: These are excluded because they are
the same implementations. I chose org.mortbay.jetty:servlet-api-2.5 instead, which is a third
implementation of the same, because Hadoop also uses this version
javax.servlet:jsp-api in favour of org.mortbay.jetty:jsp-api-2.1
javax.servlet:jsp-api in favour of javax.servlet.jsp:javax.servlet.jsp-api:2.3.1 since it
is what glassfish's jspC jar uses and that's where we get our own need for a jsp-api.
-->
<!-- Intra-module dependencies -->
<dependency>
@ -1920,6 +1923,14 @@
<artifactId>commons-math3</artifactId>
<version>${commons-math.version}</version>
</dependency>
<dependency>
<!-- commons-logging is only used by hbase-http's HttpRequestLog and hbase-server's
HBaseTestingUtil.
-->
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
@ -1983,6 +1994,16 @@
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
@ -2086,6 +2107,12 @@
<artifactId>javax.servlet.jsp</artifactId>
<version>${glassfish.jsp.version}</version>
</dependency>
<dependency>
<!-- this lib is used by the compiled Jsp from the above JspC -->
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
<version>2.3.1</version>
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
@ -2550,10 +2577,6 @@
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-two.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
@ -2595,10 +2618,6 @@
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
@ -2643,10 +2662,6 @@
<artifactId>hadoop-common</artifactId>
<version>${hadoop-two.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
@ -2697,10 +2712,6 @@
<artifactId>hadoop-minicluster</artifactId>
<version>${hadoop-two.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
@ -2819,10 +2830,6 @@
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-three.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
@ -2868,10 +2875,6 @@
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
@ -2952,10 +2955,6 @@
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
@ -3020,10 +3019,6 @@
<artifactId>hadoop-minicluster</artifactId>
<version>${hadoop-three.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>