HBASE-20332 shaded mapreduce module shouldn't include hadoop

* modify the jar checking script to take args; make hadoop stuff optional
* separate out checking the artifacts that have hadoop vs those that don't.
  * Unfortunately means we need two modules for checking things
  * put in a safety check that the support script for checking jar contents is maintained in both modules
  * have to carve out an exception for o.a.hadoop.metrics2. :(
* fix duplicated class warning
* clean up dependencies in hbase-server and some modules that depend on it.
* allow Hadoop to have its own htrace where it needs it
* add a precommit check to make sure we're not using old htrace imports
Sean Busbey 2018-04-09 13:37:44 -05:00
parent ac5bb8155b
commit f1b536bad4
25 changed files with 791 additions and 267 deletions
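For a sense of the reworked checker interface described above, a hypothetical invocation (jar paths invented for illustration, not from this commit):

# Sketch: the shaded mapreduce artifact must NOT bundle hadoop, so no flag;
# the shaded client still bundles hadoop, so it gets --allow-hadoop.
bash ensure-jars-have-correct-contents.sh \
"/path/to/hbase-shaded-mapreduce-3.0.0-SNAPSHOT.jar"
bash ensure-jars-have-correct-contents.sh --allow-hadoop \
"/path/to/hbase-shaded-client-3.0.0-SNAPSHOT.jar"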

View File

@ -154,10 +154,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>net.java.dev.jets3t</groupId> <groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId> <artifactId>jets3t</artifactId>
@ -264,9 +260,6 @@
<value>3.0</value> <value>3.0</value>
</property> </property>
</activation> </activation>
<properties>
<hadoop.version>3.0-SNAPSHOT</hadoop.version>
</properties>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
@ -276,6 +269,11 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId> <artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop.version}</version>
</dependency>
</dependencies> </dependencies>
</profile> </profile>
</profiles> </profiles>
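One way to sanity-check the new distcp wiring under the 3.0 profile (a sketch, not part of the change):

# Should list org.apache.hadoop:hadoop-distcp in this module's tree
# when the hadoop-3.0 profile is active.
mvn -Dhadoop.profile=3.0 dependency:tree -Dincludes=org.apache.hadoop:hadoop-distcp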

View File

@ -3,6 +3,13 @@
"-//Puppy Crawl//DTD Suppressions 1.0//EN" "-//Puppy Crawl//DTD Suppressions 1.0//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_0.dtd"> "http://www.puppycrawl.com/dtds/suppressions_1_0.dtd">
<!-- <!--
TODO Update to use the message suppression filter once we can update
to checkstyle 8.6+
<!DOCTYPE suppressions PUBLIC
"-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
"https://checkstyle.org/dtds/suppressions_1_2.dtd">
-->
<!--
/** /**
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
@ -39,4 +46,8 @@
<suppress checks="EmptyBlockCheck" files="TBoundedThreadPoolServer.java"/> <suppress checks="EmptyBlockCheck" files="TBoundedThreadPoolServer.java"/>
<suppress checks="EqualsHashCode" files="StartcodeAgnosticServerName.java"/> <suppress checks="EqualsHashCode" files="StartcodeAgnosticServerName.java"/>
<suppress checks="MethodLength" files="Branch1CoprocessorMethods.java"/> <suppress checks="MethodLength" files="Branch1CoprocessorMethods.java"/>
<!--
TODO use message filter once we can upgrade to checkstyle 8.6+
<suppress checks="IllegalImport" message="org\.apache\.htrace\.core"/>
-->
</suppressions> </suppressions>

View File

@ -77,6 +77,12 @@
<property name="processJavadoc" value="true"/> <property name="processJavadoc" value="true"/>
</module> </module>
<module name="IllegalImport"> <module name="IllegalImport">
<!--
TODO include the o.a.htrace package for HTrace v3 once we can upgrade
to checkstyle 8.6 plus and use a message filter to suppress
errors from proper use of o.a.htrace.core
org.apache.htrace,
-->
<property name="illegalPkgs" value=" <property name="illegalPkgs" value="
com.google.common, com.google.common,
io.netty, io.netty,
@ -87,7 +93,8 @@
org.apache.curator.shaded, org.apache.curator.shaded,
org.apache.hadoop.classification, org.apache.hadoop.classification,
org.apache.htrace.shaded, org.apache.htrace.shaded,
org.codehaus.jackson"/> org.codehaus.jackson,
org.htrace"/>
<property name="illegalClasses" value=" <property name="illegalClasses" value="
org.apache.commons.logging.Log, org.apache.commons.logging.Log,
org.apache.commons.logging.LogFactory"/> org.apache.commons.logging.LogFactory"/>
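Until checkstyle 8.6+ lets us ban o.a.htrace outright while suppressing legitimate o.a.htrace.core use, a rough manual scan for old HTrace v3 imports might look like this (a sketch, not the actual precommit check):

# htrace v3 lived in org.apache.htrace (and earlier org.htrace);
# v4 code lives in org.apache.htrace.core, which stays legal.
grep -rnE 'import org\.(htrace|apache\.htrace\.(Trace|Span|Sampler))' \
--include='*.java' . || echo "no old htrace imports found"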

View File

@ -234,10 +234,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>net.java.dev.jets3t</groupId> <groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId> <artifactId>jets3t</artifactId>

View File

@ -314,12 +314,6 @@
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<!--FYI This pulls in hadoop's guava. Its needed for Configuration <!--FYI This pulls in hadoop's guava. Its needed for Configuration
at least--> at least-->
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>
@ -366,12 +360,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>

View File

@ -260,12 +260,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
@ -296,10 +290,6 @@
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
@ -343,10 +333,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>

View File

@ -232,12 +232,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>
@ -287,12 +281,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>

View File

@ -224,10 +224,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>

View File

@ -167,12 +167,6 @@ limitations under the License.
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version> <version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>

View File

@ -335,12 +335,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>

View File

@ -196,6 +196,15 @@
<dependency> <dependency>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId> <artifactId>hbase-server</artifactId>
<exclusions>
<!-- commons-logging is only used by hbase-http's HttpRequestLog and hbase-server's
HBaseTestingUtil. We don't need either of those here, so exclude it.
-->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
@ -246,10 +255,19 @@
<artifactId>junit</artifactId> <artifactId>junit</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- jackson(s) used by PerformanceEvaluation and it looks like TableMapReduceUtil -->
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId> <artifactId>jackson-databind</artifactId>
</dependency> </dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
</dependencies> </dependencies>
<profiles> <profiles>
<!-- Skip the tests in this module --> <!-- Skip the tests in this module -->
@ -284,10 +302,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>net.java.dev.jets3t</groupId> <groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId> <artifactId>jets3t</artifactId>
@ -334,10 +348,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId> <artifactId>hadoop-hdfs</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>javax.servlet.jsp</groupId> <groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId> <artifactId>jsp-api</artifactId>
@ -377,10 +387,6 @@
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>org.apache.zookeeper</groupId> <groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId> <artifactId>zookeeper</artifactId>
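To confirm the exclusion above, a dependency:tree query along these lines should do (sketch; module name taken from context):

# Shows any remaining commons-logging paths; after this change it should
# no longer arrive via org.apache.hbase:hbase-server (hadoop may still pull it).
mvn -pl hbase-mapreduce dependency:tree -Dincludes=commons-logging:commons-logging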

View File

@ -155,10 +155,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>net.java.dev.jets3t</groupId> <groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId> <artifactId>jets3t</artifactId>

View File

@ -299,6 +299,19 @@
<groupId>com.fasterxml.jackson.jaxrs</groupId> <groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId> <artifactId>jackson-jaxrs-json-provider</artifactId>
</dependency> </dependency>
<dependency>
<!-- We *might* need this for XMLStreamReader use in RemoteAdmin
TODO figure out if we can remove it.
-->
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
<exclusions>
<exclusion>
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency> <dependency>
<!--For JspC used in ant task--> <!--For JspC used in ant task-->
<groupId>org.glassfish.web</groupId> <groupId>org.glassfish.web</groupId>
@ -374,12 +387,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>

View File

@ -198,10 +198,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>net.java.dev.jets3t</groupId> <groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId> <artifactId>jets3t</artifactId>

View File

@ -71,6 +71,14 @@
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-remote-resources-plugin</artifactId> <artifactId>maven-remote-resources-plugin</artifactId>
<version>1.5</version> <version>1.5</version>
<dependencies>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-resource-bundle</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions> <executions>
<execution> <execution>
<id>default</id> <id>default</id>
@ -392,12 +400,6 @@
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics</artifactId> <artifactId>hbase-metrics</artifactId>
</dependency> </dependency>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-resource-bundle</artifactId>
<optional>true</optional>
</dependency>
<dependency> <dependency>
<groupId>commons-codec</groupId> <groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId> <artifactId>commons-codec</artifactId>
@ -437,19 +439,16 @@
<artifactId>jetty-webapp</artifactId> <artifactId>jetty-webapp</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<!--For JspC used in ant task-->
<!-- For JspC used in ant task, then needed at compile/runtime
because the source code generated from the JSP refers to its runtime
-->
<groupId>org.glassfish.web</groupId> <groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId> <artifactId>javax.servlet.jsp</artifactId>
</dependency> </dependency>
<!-- Also used by generated sources from our JSP -->
<dependency> <dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
<exclusions>
<exclusion>
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
</exclusions>
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
</dependency> </dependency>
<!-- General dependencies --> <!-- General dependencies -->
<dependency> <dependency>
@ -501,9 +500,20 @@
<groupId>javax.servlet</groupId> <groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId> <artifactId>javax.servlet-api</artifactId>
</dependency> </dependency>
<!-- Jackson only used in compile/runtime scope by BlockCacheUtil class
also used by some tests
-->
<dependency> <dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency> </dependency>
<!-- tracing Dependencies --> <!-- tracing Dependencies -->
@ -511,11 +521,6 @@
<groupId>org.apache.htrace</groupId> <groupId>org.apache.htrace</groupId>
<artifactId>htrace-core4</artifactId> <artifactId>htrace-core4</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
<version>${htrace-hadoop.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.lmax</groupId> <groupId>com.lmax</groupId>
<artifactId>disruptor</artifactId> <artifactId>disruptor</artifactId>
@ -556,6 +561,15 @@
<artifactId>httpcore</artifactId> <artifactId>httpcore</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- commons-logging is used by HBTU to monkey with log levels.
We have to put it at compile scope because Hadoop's IOUtils uses it
(both for hadoop 2.7 and 3.0), so we'll fail at compile if it's at test scope.
-->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<scope>compile</scope>
</dependency>
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-crypto</artifactId> <artifactId>commons-crypto</artifactId>
@ -674,34 +688,10 @@
</property> </property>
</activation> </activation>
<dependencies> <dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId> <artifactId>hadoop-mapreduce-client-core</artifactId>
@ -794,11 +784,6 @@
</property> </property>
</activation> </activation>
<dependencies> <dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop-three.version}</version>
</dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
@ -809,8 +794,24 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop-three.version}</version>
<artifactId>hadoop-hdfs-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>

View File

@ -26,7 +26,7 @@
Enforces our invariants for our shaded artifacts. e.g. shaded clients have Enforces our invariants for our shaded artifacts. e.g. shaded clients have
a specific set of transitive dependencies and shaded clients only contain a specific set of transitive dependencies and shaded clients only contain
classes that are in particular packages. Does the enforcement through classes that are in particular packages. Does the enforcement through
the maven-enforcer-plugin and and integration test. the maven-enforcer-plugin and integration test.
</description> </description>
<name>Apache HBase Shaded Packaging Invariants</name> <name>Apache HBase Shaded Packaging Invariants</name>
@ -34,11 +34,15 @@
</properties> </properties>
<dependencies> <dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Include here any client facing artifacts that presume
the runtime environment will have hadoop.

If our checks fail for the shaded mapreduce artifact,
then probably a dependency from hadoop has shown up
in the hbase-mapreduce module without being flagged
as 'provided' scope. See the note by the relevant
hadoop profile in that module.
-->
<dependency> <dependency>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-mapreduce</artifactId> <artifactId>hbase-shaded-mapreduce</artifactId>
@ -113,6 +117,8 @@
<exclude>com.github.stephenc.findbugs:*</exclude> <exclude>com.github.stephenc.findbugs:*</exclude>
<!-- We leave HTrace as an unshaded dependency on purpose so that tracing within a JVM will work --> <!-- We leave HTrace as an unshaded dependency on purpose so that tracing within a JVM will work -->
<exclude>org.apache.htrace:*</exclude> <exclude>org.apache.htrace:*</exclude>
<!-- Our public API requires Hadoop at runtime to work -->
<exclude>org.apache.hadoop:*</exclude>
</excludes> </excludes>
</banTransitiveDependencies> </banTransitiveDependencies>
<banDuplicateClasses> <banDuplicateClasses>
@ -158,18 +164,37 @@
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
<!--
Check that we actually relocated everything we included.
It's critical that we don't ship third party dependencies that haven't
been relocated under our pacakge space, since this will lead to
difficult to debug classpath errors for downstream. Unfortunately, that
means inspecting all the jars.
-->
<plugin> <plugin>
<groupId>org.codehaus.mojo</groupId> <groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId> <artifactId>exec-maven-plugin</artifactId>
<version>1.6.0</version> <version>1.6.0</version>
<executions> <executions>
<!-- It's easier to have two copies of our validation
script than to copy it via remote-resources-plugin, but
we need to make sure they stay the same.
-->
<execution>
<id>make-sure-validation-files-are-in-sync</id>
<phase>validate</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>diff</executable>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>../hbase-shaded-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
<argument>../hbase-shaded-with-hadoop-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
</arguments>
</configuration>
</execution>
<!--
Check that we actually relocated everything we included.
It's critical that we don't ship third party dependencies that haven't
been relocated under our package space, since this will lead to
difficult to debug classpath errors for downstream. Unfortunately, that
means inspecting all the jars.
-->
<execution> <execution>
<id>check-jar-contents</id> <id>check-jar-contents</id>
<phase>integration-test</phase> <phase>integration-test</phase>
@ -180,6 +205,9 @@
<executable>${shell-executable}</executable> <executable>${shell-executable}</executable>
<workingDirectory>${project.build.testOutputDirectory}</workingDirectory> <workingDirectory>${project.build.testOutputDirectory}</workingDirectory>
<requiresOnline>false</requiresOnline> <requiresOnline>false</requiresOnline>
<!-- Important that we don't pass the 'allow-hadoop' flag here, because
we allowed it as a provided dependency above.
-->
<arguments> <arguments>
<argument>ensure-jars-have-correct-contents.sh</argument> <argument>ensure-jars-have-correct-contents.sh</argument>
<argument>${hbase-client-artifacts}</argument> <argument>${hbase-client-artifacts}</argument>
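The sync check earlier in this pom leans on diff's exit status; a minimal illustration of the mechanism (file names illustrative):

# diff exits 0 when the two copies match and 1 when they differ;
# a nonzero exit fails the exec goal and with it the validate phase.
diff copyA/ensure-jars-have-correct-contents.sh \
copyB/ensure-jars-have-correct-contents.sh && echo "in sync"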

View File

@ -15,33 +15,67 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# Usage: $0 [/path/to/some/example.jar:/path/to/another/example/created.jar]
#
# accepts a single command line argument with a colon separated list of
# paths to jars to check. Iterates through each such passed jar and checks
# all the contained paths to make sure they follow the below constructed
# safe list.
# we have to allow the directories that lead to the org/apache/hadoop dir
allowed_expr="(^org/$|^org/apache/$"
set -e
function usage {
echo "Usage: ${0} [options] [/path/to/some/example.jar:/path/to/another/example/created.jar]"
echo ""
echo " accepts a single command line argument with a colon separated list of"
echo " paths to jars to check. Iterates through each such passed jar and checks"
echo " all the contained paths to make sure they follow the below constructed"
echo " safe list."
echo ""
echo " --allow-hadoop Include stuff from the Apache Hadoop project in the list"
echo " of allowed jar contents. default: false"
echo " --debug print more info to stderr"
exit 1
}
# if no args specified, show usage
if [ $# -lt 1 ]; then
usage
fi
# Get arguments
declare allow_hadoop
declare debug
while [ $# -gt 0 ]
do
case "$1" in
--allow-hadoop) shift; allow_hadoop="true";;
--debug) shift; debug="true";;
--) shift; break;;
-*) usage ;;
*) break;; # terminate while loop
esac
done
# should still have jars to check.
if [ $# -lt 1 ]; then
usage
fi
if [ -n "${debug}" ]; then
echo "[DEBUG] Checking on jars: $*" >&2
echo "jar command is: $(which jar)" >&2
echo "grep command is: $(which grep)" >&2
grep -V >&2 || true
fi
IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "$1")
# we have to allow the directories that lead to the hbase dirs
allowed_expr="(^org/$|^org/apache/$|^org/apache/hadoop/$"
# We allow the following things to exist in our client artifacts: # We allow the following things to exist in our client artifacts:
# * classes in packages that start with org.apache.hadoop, which by
# convention should be in a path that looks like org/apache/hadoop
allowed_expr+="|^org/apache/hadoop/"
# * classes in packages that start with org.apache.hadoop.hbase, which by
# convention should be in a path that looks like org/apache/hadoop/hbase
allowed_expr+="|^org/apache/hadoop/hbase"
# * classes in packages that start with org.apache.hbase # * classes in packages that start with org.apache.hbase
allowed_expr+="|^org/apache/hbase/" allowed_expr+="|^org/apache/hbase/"
# * whatever in the "META-INF" directory # * whatever in the "META-INF" directory
allowed_expr+="|^META-INF/" allowed_expr+="|^META-INF/"
# * the folding tables from jcodings # * the folding tables from jcodings
allowed_expr+="|^tables/" allowed_expr+="|^tables/"
# * Hadoop's and HBase's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^[^-]*-default.xml$"
# * HBase's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^hbase-default.xml$"
# * Hadoop's versioning properties files, which have the form
# "_module_-version-info.properties"
allowed_expr+="|^[^-]*-version-info.properties$"
# * Hadoop's application classloader properties file.
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
# public suffix list used by httpcomponents # public suffix list used by httpcomponents
allowed_expr+="|^mozilla/$" allowed_expr+="|^mozilla/$"
allowed_expr+="|^mozilla/public-suffix-list.txt$" allowed_expr+="|^mozilla/public-suffix-list.txt$"
@ -51,12 +85,30 @@ allowed_expr+="|^properties.dtd$"
allowed_expr+="|^PropertyList-1.0.dtd$" allowed_expr+="|^PropertyList-1.0.dtd$"
if [ -n "${allow_hadoop}" ]; then
# * classes in packages that start with org.apache.hadoop, which by
# convention should be in a path that looks like org/apache/hadoop
allowed_expr+="|^org/apache/hadoop/"
# * Hadoop's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^[^-]*-default.xml$"
# * Hadoop's versioning properties files, which have the form
# "_module_-version-info.properties"
allowed_expr+="|^[^-]*-version-info.properties$"
# * Hadoop's application classloader properties file.
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
else
# We have some classes for integrating with the Hadoop Metrics2 system
# that have to be in a particular package space due to access rules.
allowed_expr+="|^org/apache/hadoop/metrics2"
fi
allowed_expr+=")" allowed_expr+=")"
declare -i bad_artifacts=0 declare -i bad_artifacts=0
declare -a bad_contents declare -a bad_contents
IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "$1")
for artifact in "${artifact_list[@]}"; do for artifact in "${artifact_list[@]}"; do
bad_contents=($(jar tf "${artifact}" | grep -v -E "${allowed_expr}"))
bad_contents=($(jar tf "${artifact}" | grep -v -E "${allowed_expr}" || true))
if [ ${#bad_contents[@]} -gt 0 ]; then if [ ${#bad_contents[@]} -gt 0 ]; then
echo "[ERROR] Found artifact with unexpected contents: '${artifact}'" echo "[ERROR] Found artifact with unexpected contents: '${artifact}'"
echo " Please check the following and either correct the build or update" echo " Please check the following and either correct the build or update"

View File

@ -62,6 +62,10 @@
</plugins> </plugins>
</build> </build>
<dependencies> <dependencies>
<!--
We want to ensure needed hadoop bits are at provided scope for our shaded
artifact, so we list them below in hadoop specific profiles.
-->
<dependency> <dependency>
<groupId>org.apache.hbase</groupId> <groupId>org.apache.hbase</groupId>
<artifactId>hbase-mapreduce</artifactId> <artifactId>hbase-mapreduce</artifactId>
@ -137,10 +141,6 @@
<groupId>org.eclipse.jetty</groupId> <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId> <artifactId>jetty-webapp</artifactId>
</exclusion> </exclusion>
<exclusion>
<groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>org.glassfish.jersey.core</groupId> <groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId> <artifactId>jersey-server</artifactId>
@ -149,6 +149,17 @@
<groupId>org.glassfish.jersey.containers</groupId> <groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId> <artifactId>jersey-container-servlet-core</artifactId>
</exclusion> </exclusion>
<!-- We excluded the server-side generated classes for JSP, so exclude
their runtime support libraries too
-->
<exclusion>
<groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
</exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
@ -158,12 +169,175 @@
<id>release</id> <id>release</id>
<build> <build>
<plugins> <plugins>
<!-- Tell the shade plugin we want to leave Hadoop as a dependency -->
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId> <artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<id>aggregate-into-a-jar-with-relocated-third-parties</id>
<configuration>
<artifactSet>
<excludes>
<exclude>org.apache.hadoop:*</exclude>
<!-- The rest of these should be kept in sync with the parent pom -->
<exclude>org.apache.hbase:hbase-resource-bundle</exclude>
<exclude>org.slf4j:*</exclude>
<exclude>com.google.code.findbugs:*</exclude>
<exclude>com.github.stephenc.findbugs:*</exclude>
<exclude>org.apache.htrace:*</exclude>
<exclude>org.apache.yetus:*</exclude>
<exclude>log4j:*</exclude>
<exclude>commons-logging:*</exclude>
</excludes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
</profile> </profile>
<!-- These hadoop profiles should be derived from those in the hbase-mapreduce
module. Essentially, you must list the same hadoop-* dependencies
since provided dependencies are not transitively included.
-->
<!-- profile against Hadoop 2.x: This is the default. -->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h2--><name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-compiler</artifactId>
</exclusion>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-runtime</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</exclusion>
<exclusion>
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
<exclusion>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
</exclusions>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
<!--
profile for building against Hadoop 3.0.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<properties>
<hadoop.version>${hadoop-three.version}</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
</profiles> </profiles>
</project> </project>
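A quick way to eyeball the effect of these provided-scope profiles on the built artifact (jar path invented):

# After shading, the only org/apache/hadoop classes left in the
# mapreduce artifact should be under hbase/ or metrics2/.
jar tf target/hbase-shaded-mapreduce-3.0.0-SNAPSHOT.jar \
| grep '^org/apache/hadoop/' \
| grep -vE '^org/apache/hadoop/(hbase|metrics2)' || echo "clean"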

View File

@ -0,0 +1,215 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase</artifactId>
<groupId>org.apache.hbase</groupId>
<version>3.0.0-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
<artifactId>hbase-shaded-with-hadoop-check-invariants</artifactId>
<packaging>pom</packaging>
<description>
Enforces our invariants for our shaded artifacts. e.g. shaded clients have
a specific set of transitive dependencies and shaded clients only contain
classes that are in particular packages. Does the enforcement through
the maven-enforcer-plugin and integration test.
</description>
<name>Apache HBase Shaded Packaging Invariants (with Hadoop bundled)</name>
<properties>
</properties>
<dependencies>
<!-- This should only be client facing artifacts that bundle
Apache Hadoop related artifacts.
-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- parent pom defines these for children. :( :( :( -->
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>provided</scope>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.0-beta-6</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>enforce-banned-dependencies</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<skip>true</skip>
<rules>
<banTransitiveDependencies>
<!--
<message>
Our client-facing artifacts are not supposed to have additional dependencies
and one or more of them do. The output from the enforcer plugin should give
specifics.
</message>
-->
<excludes>
<!-- We leave logging stuff alone -->
<exclude>org.slf4j:*</exclude>
<exclude>log4j:*</exclude>
<exclude>commons-logging:*</exclude>
<!-- annotations that never change -->
<exclude>com.google.code.findbugs:*</exclude>
<exclude>com.github.stephenc.findbugs:*</exclude>
<!-- We leave HTrace as an unshaded dependency on purpose so that tracing within a JVM will work -->
<exclude>org.apache.htrace:*</exclude>
<!-- NB we don't exclude Hadoop from this check here, because the assumption is any needed classes
are contained in our artifacts.
-->
</excludes>
</banTransitiveDependencies>
<banDuplicateClasses>
<findAllDuplicates>true</findAllDuplicates>
</banDuplicateClasses>
</rules>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>test-resources</id>
<phase>pre-integration-test</phase>
<goals>
<goal>testResources</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<!-- create a maven pom property that has all of our dependencies.
below in the integration-test phase we'll pass this list
of paths to our jar checker script.
-->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>put-client-artifacts-in-a-property</id>
<phase>pre-integration-test</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<excludeScope>provided</excludeScope>
<excludeTransitive>true</excludeTransitive>
<outputProperty>hbase-client-artifacts</outputProperty>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.6.0</version>
<executions>
<!-- It's easier to have two copies of our validation
script than to copy it via remote-resources-plugin, but
we need to make sure they stay the same.
-->
<execution>
<id>make-sure-validation-files-are-in-sync</id>
<phase>validate</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>diff</executable>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>../hbase-shaded-with-hadoop-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
<argument>../hbase-shaded-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh</argument>
</arguments>
</configuration>
</execution>
<!--
Check that we actually relocated everything we included.
It's critical that we don't ship third party dependencies that haven't
been relocated under our package space, since this will lead to
difficult to debug classpath errors for downstream. Unfortunately, that
means inspecting all the jars.
-->
<execution>
<id>check-jar-contents-for-stuff-with-hadoop</id>
<phase>integration-test</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.testOutputDirectory}</workingDirectory>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>ensure-jars-have-correct-contents.sh</argument>
<argument>--allow-hadoop</argument>
<argument>${hbase-client-artifacts}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
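For reference, build-classpath above writes the resolved jar paths into the hbase-client-artifacts property as a platform-separated classpath string (colon-separated on Linux), which is exactly the single argument the script parses; roughly (paths invented):

# What the integration-test exec boils down to:
bash ensure-jars-have-correct-contents.sh --allow-hadoop \
"/home/you/.m2/repository/org/apache/hbase/hbase-shaded-client/3.0.0-SNAPSHOT/hbase-shaded-client-3.0.0-SNAPSHOT.jar"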

View File

@ -0,0 +1,129 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
function usage {
echo "Usage: ${0} [options] [/path/to/some/example.jar:/path/to/another/example/created.jar]"
echo ""
echo " accepts a single command line argument with a colon separated list of"
echo " paths to jars to check. Iterates through each such passed jar and checks"
echo " all the contained paths to make sure they follow the below constructed"
echo " safe list."
echo ""
echo " --allow-hadoop Include stuff from the Apache Hadoop project in the list"
echo " of allowed jar contents. default: false"
echo " --debug print more info to stderr"
exit 1
}
# if no args specified, show usage
if [ $# -lt 1 ]; then
usage
fi
# Get arguments
declare allow_hadoop
declare debug
while [ $# -gt 0 ]
do
case "$1" in
--allow-hadoop) shift; allow_hadoop="true";;
--debug) shift; debug="true";;
--) shift; break;;
-*) usage ;;
*) break;; # terminate while loop
esac
done
# should still have jars to check.
if [ $# -lt 1 ]; then
usage
fi
if [ -n "${debug}" ]; then
echo "[DEBUG] Checking on jars: $*" >&2
echo "jar command is: $(which jar)" >&2
echo "grep command is: $(which grep)" >&2
grep -V >&2 || true
fi
IFS=: read -r -d '' -a artifact_list < <(printf '%s\0' "$1")
# we have to allow the directories that lead to the hbase dirs
allowed_expr="(^org/$|^org/apache/$|^org/apache/hadoop/$"
# We allow the following things to exist in our client artifacts:
# * classes in packages that start with org.apache.hadoop.hbase, which by
# convention should be in a path that looks like org/apache/hadoop/hbase
allowed_expr+="|^org/apache/hadoop/hbase"
# * classes in packages that start with org.apache.hbase
allowed_expr+="|^org/apache/hbase/"
# * whatever in the "META-INF" directory
allowed_expr+="|^META-INF/"
# * the folding tables from jcodings
allowed_expr+="|^tables/"
# * HBase's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^hbase-default.xml$"
# public suffix list used by httpcomponents
allowed_expr+="|^mozilla/$"
allowed_expr+="|^mozilla/public-suffix-list.txt$"
# Comes from commons-configuration, not sure if relocatable.
allowed_expr+="|^digesterRules.xml$"
allowed_expr+="|^properties.dtd$"
allowed_expr+="|^PropertyList-1.0.dtd$"
if [ -n "${allow_hadoop}" ]; then
# * classes in packages that start with org.apache.hadoop, which by
# convention should be in a path that looks like org/apache/hadoop
allowed_expr+="|^org/apache/hadoop/"
# * Hadoop's default configuration files, which have the form
# "_module_-default.xml"
allowed_expr+="|^[^-]*-default.xml$"
# * Hadoop's versioning properties files, which have the form
# "_module_-version-info.properties"
allowed_expr+="|^[^-]*-version-info.properties$"
# * Hadoop's application classloader properties file.
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
else
# We have some classes for integrating with the Hadoop Metrics2 system
# that have to be in a particular package space due to access rules.
allowed_expr+="|^org/apache/hadoop/metrics2"
fi
allowed_expr+=")"
declare -i bad_artifacts=0
declare -a bad_contents
for artifact in "${artifact_list[@]}"; do
bad_contents=($(jar tf "${artifact}" | grep -v -E "${allowed_expr}" || true))
if [ ${#bad_contents[@]} -gt 0 ]; then
echo "[ERROR] Found artifact with unexpected contents: '${artifact}'"
echo " Please check the following and either correct the build or update"
echo " the allowed list with reasoning."
echo ""
for bad_line in "${bad_contents[@]}"; do
echo " ${bad_line}"
done
bad_artifacts=${bad_artifacts}+1
else
echo "[INFO] Artifact looks correct: '$(basename "${artifact}")'"
fi
done
# if there was at least one bad artifact, exit with failure
if [ "${bad_artifacts}" -gt 0 ]; then
exit 1
fi
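A worked example of the argument handling above (jar names invented): the colon-separated list is split by the IFS=: read into an array, so several jars can be checked in one run.

# --debug prints tool locations to stderr before checking both jars.
bash ensure-jars-have-correct-contents.sh --debug --allow-hadoop \
"dist/first-example.jar:dist/second-example.jar"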

View File

@ -42,6 +42,7 @@
<module>hbase-shaded-client</module> <module>hbase-shaded-client</module>
<module>hbase-shaded-mapreduce</module> <module>hbase-shaded-mapreduce</module>
<module>hbase-shaded-check-invariants</module> <module>hbase-shaded-check-invariants</module>
<module>hbase-shaded-with-hadoop-check-invariants</module>
</modules> </modules>
<dependencies> <dependencies>
<dependency> <dependency>
@ -118,6 +119,7 @@
<artifactId>maven-shade-plugin</artifactId> <artifactId>maven-shade-plugin</artifactId>
<executions> <executions>
<execution> <execution>
<id>aggregate-into-a-jar-with-relocated-third-parties</id>
<phase>package</phase> <phase>package</phase>
<goals> <goals>
<goal>shade</goal> <goal>shade</goal>
@ -447,12 +449,23 @@
<exclude>META-INF/ECLIPSEF.RSA</exclude> <exclude>META-INF/ECLIPSEF.RSA</exclude>
</excludes> </excludes>
</filter> </filter>
<filter>
<!-- Duplication of classes that ship in commons-collections 2.x and 3.x
If we stop bundling a relevant commons-collections artifact we'll
need to revisit. See: https://s.apache.org/e09o
-->
<artifact>commons-beanutils:commons-beanutils-core</artifact>
<excludes>
<exclude>org/apache/commons/collections/*.class</exclude>
</excludes>
</filter>
<filter> <filter>
<!-- server side webapps that we don't need --> <!-- server side webapps that we don't need -->
<artifact>org.apache.hbase:hbase-server</artifact> <artifact>org.apache.hbase:hbase-server</artifact>
<excludes> <excludes>
<exclude>hbase-webapps/*</exclude> <exclude>hbase-webapps/*</exclude>
<exclude>hbase-webapps/**/*</exclude> <exclude>hbase-webapps/**/*</exclude>
<exclude>**/*_jsp.class</exclude>
</excludes> </excludes>
</filter> </filter>
<filter> <filter>

View File

@ -277,12 +277,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
@ -341,10 +335,6 @@
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
@ -408,10 +398,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>

View File

@ -140,12 +140,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<scope>compile</scope> <scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
@ -201,10 +195,6 @@
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<scope>compile</scope> <scope>compile</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
@ -242,12 +232,6 @@
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<scope>compile</scope> <scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>

View File

@ -503,22 +503,12 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
@ -576,12 +566,6 @@
<dependency> <dependency>
<groupId>org.apache.hadoop</groupId> <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<build> <build>

pom.xml
View File

@ -1448,8 +1448,11 @@
<hadoop.guava.version>11.0.2</hadoop.guava.version> <hadoop.guava.version>11.0.2</hadoop.guava.version>
<compat.module>hbase-hadoop2-compat</compat.module> <compat.module>hbase-hadoop2-compat</compat.module>
<assembly.file>src/main/assembly/hadoop-two-compat.xml</assembly.file> <assembly.file>src/main/assembly/hadoop-two-compat.xml</assembly.file>
<audience-annotations.version>0.5.0</audience-annotations.version>
<!--This property is for hadoops netty. HBase netty
comes in via hbase-thirdparty hbase-shaded-netty-->
<netty.hadoop.version>3.6.2.Final</netty.hadoop.version>
<!-- end HBASE-15925 default hadoop compatibility values --> <!-- end HBASE-15925 default hadoop compatibility values -->
<audience-annotations.version>0.5.0</audience-annotations.version>
<avro.version>1.7.7</avro.version> <avro.version>1.7.7</avro.version>
<commons-codec.version>1.10</commons-codec.version> <commons-codec.version>1.10</commons-codec.version>
<!-- pretty outdated --> <!-- pretty outdated -->
@ -1477,7 +1480,6 @@
<junit.version>4.12</junit.version> <junit.version>4.12</junit.version>
<hamcrest.version>1.3</hamcrest.version> <hamcrest.version>1.3</hamcrest.version>
<htrace.version>4.2.0-incubating</htrace.version> <htrace.version>4.2.0-incubating</htrace.version>
<htrace-hadoop.version>3.2.0-incubating</htrace-hadoop.version>
<log4j.version>1.2.17</log4j.version> <log4j.version>1.2.17</log4j.version>
<mockito-core.version>2.1.0</mockito-core.version> <mockito-core.version>2.1.0</mockito-core.version>
<!--Internally we use a different version of protobuf. See hbase-protocol-shaded--> <!--Internally we use a different version of protobuf. See hbase-protocol-shaded-->
@ -1605,7 +1607,8 @@
org.mortbay.jetty:servlet-api, javax.servlet:servlet-api: These are excluded because they are org.mortbay.jetty:servlet-api, javax.servlet:servlet-api: These are excluded because they are
the same implementations. I chose org.mortbay.jetty:servlet-api-2.5 instead, which is a third the same implementations. I chose org.mortbay.jetty:servlet-api-2.5 instead, which is a third
implementation of the same, because Hadoop also uses this version implementation of the same, because Hadoop also uses this version
javax.servlet:jsp-api in favour of org.mortbay.jetty:jsp-api-2.1
javax.servlet:jsp-api in favour of javax.servlet.jsp:javax.servlet.jsp-api:2.3.1 since it
is what glassfish's jspC jar uses and that's where we get our own need for a jsp-api.
--> -->
<!-- Intra-module dependencies --> <!-- Intra-module dependencies -->
<dependency> <dependency>
@ -1920,6 +1923,14 @@
<artifactId>commons-math3</artifactId> <artifactId>commons-math3</artifactId>
<version>${commons-math.version}</version> <version>${commons-math.version}</version>
</dependency> </dependency>
<dependency>
<!-- commons-logging is only used by hbase-http's HttpRequestLog and hbase-server's
HBaseTestingUtil.
-->
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
</dependency>
<dependency> <dependency>
<groupId>org.apache.zookeeper</groupId> <groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId> <artifactId>zookeeper</artifactId>
@ -1983,6 +1994,16 @@
<artifactId>jackson-jaxrs-json-provider</artifactId> <artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${jackson.version}</version> <version>${jackson.version}</version>
</dependency> </dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId> <artifactId>jackson-databind</artifactId>
@ -2086,6 +2107,12 @@
<artifactId>javax.servlet.jsp</artifactId> <artifactId>javax.servlet.jsp</artifactId>
<version>${glassfish.jsp.version}</version> <version>${glassfish.jsp.version}</version>
</dependency> </dependency>
<dependency>
<!-- this lib is used by the compiled Jsp from the above JspC -->
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
<version>2.3.1</version>
</dependency>
<dependency> <dependency>
<groupId>org.glassfish</groupId> <groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId> <artifactId>javax.el</artifactId>
@ -2550,10 +2577,6 @@
<artifactId>hadoop-hdfs</artifactId> <artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-two.version}</version> <version>${hadoop-two.version}</version>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>javax.servlet.jsp</groupId> <groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId> <artifactId>jsp-api</artifactId>
@ -2595,10 +2618,6 @@
<type>test-jar</type> <type>test-jar</type>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>javax.servlet.jsp</groupId> <groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId> <artifactId>jsp-api</artifactId>
@ -2643,10 +2662,6 @@
<artifactId>hadoop-common</artifactId> <artifactId>hadoop-common</artifactId>
<version>${hadoop-two.version}</version> <version>${hadoop-two.version}</version>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>commons-beanutils</groupId> <groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId> <artifactId>commons-beanutils</artifactId>
@ -2697,10 +2712,6 @@
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<version>${hadoop-two.version}</version> <version>${hadoop-two.version}</version>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>commons-httpclient</groupId> <groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId> <artifactId>commons-httpclient</artifactId>
@ -2819,10 +2830,6 @@
<artifactId>hadoop-hdfs</artifactId> <artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-three.version}</version> <version>${hadoop-three.version}</version>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>com.sun.jersey</groupId> <groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId> <artifactId>jersey-core</artifactId>
@ -2868,10 +2875,6 @@
<type>test-jar</type> <type>test-jar</type>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>javax.servlet.jsp</groupId> <groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId> <artifactId>jsp-api</artifactId>
@ -2952,10 +2955,6 @@
<groupId>com.sun.jersey</groupId> <groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId> <artifactId>jersey-server</artifactId>
</exclusion> </exclusion>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>javax.servlet.jsp</groupId> <groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId> <artifactId>jsp-api</artifactId>
@ -3020,10 +3019,6 @@
<artifactId>hadoop-minicluster</artifactId> <artifactId>hadoop-minicluster</artifactId>
<version>${hadoop-three.version}</version> <version>${hadoop-three.version}</version>
<exclusions> <exclusions>
<exclusion>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>commons-httpclient</groupId> <groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId> <artifactId>commons-httpclient</artifactId>
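With all of these htrace-core exclusions gone, Hadoop is free to pull in its own HTrace 3 where it needs it; to see who depends on which htrace, a check like this works (a sketch):

# HBase itself should only depend on htrace-core4; any htrace-core (3.x)
# entries should appear strictly under hadoop artifacts.
mvn dependency:tree -Dincludes='org.apache.htrace:*'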