HBASE-18723 [pom cleanup] Do a pass with dependency:analyze; remove unused and explicitly list the dependencies we exploit; ADDENDUM

Addendum addresses holes found running HBASE-18674 against hadoopqa.
Michael Stack 2017-09-02 13:14:09 -07:00
parent c762753b4b
commit 91ab25b469
4 changed files with 113 additions and 11 deletions
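
For context (not part of the commit): the pass described in the commit message uses the Maven dependency plugin's analyze goal, which can be run ad hoc with mvn dependency:analyze or bound into the build. The sketch below is illustrative only; the failOnWarning setting is an assumption for the sketch, not something the HBase poms are shown to configure.

<!-- Illustrative sketch, not part of this commit: binding the analyzer into
     the build so dependency regressions are caught. failOnWarning is an
     assumed setting, not taken from the HBase poms. -->
<build>
  <plugins>
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-dependency-plugin</artifactId>
      <executions>
        <execution>
          <id>analyze</id>
          <goals>
            <goal>analyze-only</goal>
          </goals>
          <configuration>
            <!-- Fail on used-but-undeclared and declared-but-unused findings -->
            <failOnWarning>true</failOnWarning>
          </configuration>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>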


@@ -107,6 +107,12 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-mapreduce</artifactId>
@@ -156,11 +162,6 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -179,7 +180,7 @@
</property>
</activation>
<dependencies>
<!--dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
@@ -223,14 +224,57 @@
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
<!--We let hadoop include guava here. It uses it
in the Configuration class -->
</exclusions>
</dependency-->
<!-- Hadoop needs Netty 3.x at test scope for the minicluster -->
</dependency>
<dependency>
<!--Hadoop 2 uses Guava, in its Configuration class at least-->
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${hadoop.guava.version}</version>
<exclusions>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<!--This module seems to need this.-->
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<version>${netty.hadoop.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop-two.version}</version>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-distcp</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
</dependencies>
</profile>
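
Aside, not part of the diff: the <activation> element truncated in the hunk above is what selects this Hadoop 2 profile. In the HBase poms such profiles are conventionally switched via the hadoop.profile command-line property; the exact property name below is an assumption, since the diff does not show those lines.

<!-- Assumed shape of the truncated activation: the hadoop-2.0 profile is the
     default and is active whenever -Dhadoop.profile is not supplied. -->
<profile>
  <id>hadoop-2.0</id>
  <activation>
    <property>
      <name>!hadoop.profile</name>
    </property>
  </activation>
  <dependencies>
    <!-- ... the dependencies shown in the hunk above ... -->
  </dependencies>
</profile>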


@@ -153,6 +153,19 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</dependency>
<dependency>
<!--Needed by ExportSnapshot. It is reading
Snapshot protos. TODO: Move to internal types.-->
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
</dependency>
<dependency>
<!--Needed by ExportSnapshot. It is reading
Snapshot protos. TODO: Move to internal types.-->
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${external.protobuf.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol-shaded</artifactId>
@@ -165,6 +178,17 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics-api</artifactId>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
<!--This is not used by hbase directly. Used by thrift,
dropwizard and zk.-->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-prefix-tree</artifactId>
@@ -182,6 +206,14 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
@@ -189,8 +221,10 @@
<scope>test</scope>
</dependency>
<dependency>
<!--Do not remove. maven dependency:analyze says this is not needed, but it
is; perhaps it comes in via reflection, so maven can't figure it out-->
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop2-compat</artifactId>
<artifactId>${compat.module}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
@@ -379,7 +413,7 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
@@ -388,6 +422,7 @@
</exclusions>
</dependency>
<dependency>
<!--maven dependency:analyze says not needed but tests fail w/o-->
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
@@ -417,6 +452,7 @@
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<!--maven dependency:analyze says not needed but tests fail w/o-->
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
</dependency>
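
Related aside, not part of the commit: the comments above note that dependency:analyze flags hadoop-minicluster as unused even though tests fail without it. If the analyzer were ever made to fail the build (as in the sketch near the top), such false positives can be whitelisted via the plugin's ignoredUnusedDeclaredDependencies parameter; the fragment below is illustrative only and would sit inside that plugin's configuration.

<!-- Sketch only: whitelisting a known analyze false positive. -->
<configuration>
  <ignoredUnusedDeclaredDependencies>
    <!-- Needed at test runtime (minicluster), but not referenced at compile time -->
    <ignoredUnusedDeclaredDependency>org.apache.hadoop:hadoop-minicluster</ignoredUnusedDeclaredDependency>
  </ignoredUnusedDeclaredDependencies>
</configuration>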


@@ -216,6 +216,7 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</dependency>
<!--The MapReduce (MR) dependencies below are wanted by PerformanceEvaluation (PE)-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-mapreduce</artifactId>
@@ -230,11 +231,26 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<!-- REMOVE
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
@@ -344,6 +360,11 @@
<artifactId>findbugs-annotations</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${servlet.api.version}</version>
</dependency>
</dependencies>
<profiles>
<!-- Skip the tests in this module -->


@@ -1341,6 +1341,7 @@
They ought to match the values found in our default hadoop profile, which is
currently "hadoop-2.0". See HBASE-15925 for more info. -->
<hadoop.version>${hadoop-two.version}</hadoop.version>
<hadoop.guava.version>11.0.2</hadoop.guava.version>
<compat.module>hbase-hadoop2-compat</compat.module>
<assembly.file>src/main/assembly/hadoop-two-compat.xml</assembly.file>
<!-- end HBASE-15925 default hadoop compatibility values -->