hbase/hbase-mapreduce/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-build-configuration</artifactId>
<version>${revision}</version>
<relativePath>../hbase-build-configuration</relativePath>
</parent>
<artifactId>hbase-mapreduce</artifactId>
<name>Apache HBase - MapReduce</name>
<description>This module contains implementations of InputFormat, OutputFormat, Mapper, Reducer, etc., which
are needed for running MR jobs on tables, WALs, HFiles and other HBase-specific constructs.
It also contains a number of tools: RowCounter, ImportTsv, Import, Export, CompactionTool,
ExportSnapshot, WALPlayer, etc.</description>
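<!--
A minimal usage sketch (illustrative, not part of the build): the tools listed above are
typically launched via the hbase script, which puts this module and its dependencies on
the classpath. For example, assuming a table named "mytable" exists:

  hbase org.apache.hadoop.hbase.mapreduce.RowCounter mytable

The same programs are also registered with the Driver class that the maven-jar-plugin
below sets as the jar's main class.
-->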
<dependencies>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-netty</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-protobuf</artifactId>
</dependency>
<!-- Intra-project dependencies -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-logging</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-zookeeper</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-zookeeper</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<!--Needed by ExportSnapshot. It reads
Snapshot protos. TODO: Move to internal types.-->
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
</dependency>
<dependency>
<!--Needed by ExportSnapshot. It reads
Snapshot protos. TODO: Move to internal types.-->
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol-shaded</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-asyncfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-asyncfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
<!--This is not used by hbase directly. Used by thrift,
dropwizard and zk.-->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<!--Do not remove. maven dependency:analyze says this is not needed, but it
is; perhaps it comes in via reflection, so maven can't detect it.-->
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<exclusions>
<!-- commons-logging is only used by hbase-http's HttpRequestLog and hbase-server's
HBaseTestingUtil. We don't need either of those here, so exclude it.
-->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-replication</artifactId>
</dependency>
<!-- General dependencies -->
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
<scope>compile</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<!--Make it so assembly:single does nothing in here-->
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<skipAssembly>true</skipAssembly>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<!--Include the Driver class as the 'main'. Executing the jar will then show a
list of the basic MR jobs.-->
<mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
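<!--
Illustrative sketch of what the manifest setting above enables, assuming a built jar and
an installed Hadoop; VERSION is a placeholder, and "hbase classpath" supplies the HBase
dependencies:

  HADOOP_CLASSPATH=$(hbase classpath) hadoop jar hbase-mapreduce-VERSION.jar

Invoked with no arguments, the Driver prints the list of registered MapReduce programs
(e.g. rowcounter, export, import, WALPlayer), and each can be run by name with its own
arguments appended.
-->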
<!-- Make a jar and put the sources in the jar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
</plugin>
<plugin>
<groupId>net.revelc.code</groupId>
<artifactId>warbucks-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
<profiles>
<!-- Skip the tests in this module -->
<profile>
<id>skipMapReduceTests</id>
<activation>
<property>
<name>skipMapReduceTests</name>
</property>
</activation>
<properties>
<surefire.skipFirstPart>true</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
</properties>
</profile>
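<!--
Example activation for the profile above (a sketch; the activation only checks that the
property is defined, so any value works):

  mvn test -DskipMapReduceTests

Defining the property flips surefire.skipFirstPart and surefire.skipSecondPart to true,
which the shared surefire configuration uses to skip both test executions.
-->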
<!-- profile against Hadoop 2.x: This is the default. -->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h2-->
<name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<!-- Hadoop needs Netty 3.x at test scope for the minicluster -->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<version>${netty.hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<!--maven dependency:analyze says this is not needed, but tests fail without it-->
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
</profile>
<!--
profile for building against Hadoop 3.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<!--maven dependency:analyze says this is not needed, but tests fail without it-->
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
</profile>
<profile>
<id>eclipse-specific</id>
<activation>
<property>
<name>m2e.version</name>
</property>
</activation>
<build>
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself.-->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions/>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
</profile>
</profiles>
</project>