hbase/hbase-server/pom.xml

<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<!-- for testing -->
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase-build-configuration</artifactId>
<groupId>org.apache.hbase</groupId>
<version>2.2.0-SNAPSHOT</version>
<relativePath>../hbase-build-configuration</relativePath>
</parent>
<artifactId>hbase-server</artifactId>
<name>Apache HBase - Server</name>
<description>Server functionality for HBase</description>
<properties>
<test.build.webapps>target/test-classes/webapps</test.build.webapps>
<license.bundles.logo>true</license.bundles.logo>
<license.bundles.bootstrap>true</license.bundles.bootstrap>
<license.bundles.jquery>true</license.bundles.jquery>
</properties>
<build>
<!-- Make sure resources get added before they are processed
by placing this first
-->
<resources>
<!-- Add the build webapps to the classpath -->
<resource>
<directory>${project.build.directory}</directory>
<includes>
<include>hbase-webapps/**</include>
</includes>
</resource>
</resources>
<testResources>
<!-- Our test artifact has different license info than our source/bin ones -->
<testResource>
<directory>src/test/resources/META-INF/</directory>
<targetPath>META-INF/</targetPath>
<includes>
<include>NOTICE</include>
</includes>
<filtering>true</filtering>
</testResource>
<testResource>
<directory>src/test/resources</directory>
<includes>
<include>**/**</include>
</includes>
</testResource>
</testResources>
<plugins>
<!-- licensing info from our bundled works -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-remote-resources-plugin</artifactId>
<version>1.5</version>
<dependencies>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-resource-bundle</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>default</id>
<configuration>
<attachToTest>false</attachToTest>
<properties>
<copyright-end-year>${build.year}</copyright-end-year>
<debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
<bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
<bundled-jquery>${license.bundles.jquery}</bundled-jquery>
<bundled-logo>${license.bundles.logo}</bundled-logo>
<bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
</properties>
<resourceBundles>
<resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
</resourceBundles>
<supplementalModelArtifacts>
<supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
</supplementalModelArtifacts>
<supplementalModels>
<supplementalModel>supplemental-models.xml</supplementalModel>
</supplementalModels>
</configuration>
</execution>
</executions>
</plugin>
<!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running
tests (this is needed for upstream projects whose tests need this jar simply for
compilation) -->
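<!-- For example (assuming a standard Maven invocation from this module's directory),
something like: mvn install -Dmaven.test.skip.exec=true
should compile the test classes and install the -tests.jar without executing any tests. -->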
<plugin>
<!--Make it so assembly:single does nothing in here-->
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<skipAssembly>true</skipAssembly>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<!-- Exclude these two packages because their dependency _binary_ files
include the sources, and Maven 2.2 appears to add them to the sources to
compile, oddly enough -->
<excludes>
<exclude>org/apache/jute/**</exclude>
<exclude>org/apache/zookeeper/**</exclude>
<exclude>**/*.jsp</exclude>
<exclude>hbase-site.xml</exclude>
<exclude>hdfs-site.xml</exclude>
<exclude>log4j.properties</exclude>
<exclude>mapred-queues.xml</exclude>
<exclude>mapred-site.xml</exclude>
</excludes>
</configuration>
</plugin>
<!-- General ant tasks, bound to different build phases -->
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<!-- Generate web app sources -->
<execution>
<id>generate</id>
<phase>generate-sources</phase>
<configuration>
<target>
<property name="build.webapps" location="${project.build.directory}/hbase-webapps"/>
<property name="src.webapps" location="${basedir}/src/main/resources/hbase-webapps"/>
<property name="generated.sources" location="${project.build.directory}/generated-sources"/>
<mkdir dir="${build.webapps}"/>
<copy todir="${build.webapps}">
<fileset dir="${src.webapps}">
<exclude name="**/*.jsp"/>
<exclude name="**/.*"/>
<exclude name="**/*~"/>
</fileset>
</copy>
<!--The compile.classpath is passed in by maven -->
<taskdef classname="org.apache.jasper.JspC" name="jspcompiler" classpathref="maven.compile.classpath"/>
<mkdir dir="${build.webapps}/master/WEB-INF"/>
<jspcompiler uriroot="${src.webapps}/master" outputdir="${generated.sources}/java" package="org.apache.hadoop.hbase.generated.master" webxml="${build.webapps}/master/WEB-INF/web.xml"/>
<mkdir dir="${build.webapps}/regionserver/WEB-INF"/>
<jspcompiler uriroot="${src.webapps}/regionserver" outputdir="${generated.sources}/java" package="org.apache.hadoop.hbase.generated.regionserver" webxml="${build.webapps}/regionserver/WEB-INF/web.xml"/>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<!-- Add the generated sources -->
<execution>
<id>jspcSource-packageInfo-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/generated-jamon</source>
<source>${project.build.directory}/generated-sources/java</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.jamon</groupId>
<artifactId>jamon-maven-plugin</artifactId>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>translate</goal>
</goals>
<configuration>
<templateSourceDir>src/main/jamon</templateSourceDir>
<templateOutputDir>target/generated-jamon</templateOutputDir>
</configuration>
</execution>
</executions>
</plugin>
<!-- General plugins -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<configuration>
<additionalProjectnatures>
<projectnature>org.jamon.project.jamonnature</projectnature>
</additionalProjectnatures>
<buildcommands>
<buildcommand>org.jamon.project.templateBuilder</buildcommand>
<buildcommand>org.eclipse.jdt.core.javabuilder</buildcommand>
<buildcommand>org.jamon.project.markerUpdater</buildcommand>
</buildcommands>
<additionalConfig>
<file>
<name>.settings/org.jamon.prefs</name>
<content># now
eclipse.preferences.version=1
templateSourceDir=src/main/jamon
templateOutputDir=target/generated-jamon
</content>
</file>
</additionalConfig>
</configuration>
</plugin>
<!-- Run findbugs -->
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
</plugin>
<!-- Testing plugins -->
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<test.build.webapps>target/test-classes/webapps</test.build.webapps>
</systemPropertyVariables>
</configuration>
</plugin>
<plugin>
<groupId>net.revelc.code</groupId>
<artifactId>warbucks-maven-plugin</artifactId>
</plugin>
</plugins>
<!-- Plugin management -->
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself and needs to
be kept in plugin management, not in the actual plugins. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.jamon</groupId>
<artifactId>jamon-maven-plugin</artifactId>
<versionRange>[2.3.4,)</versionRange>
<goals>
<goal>translate</goal>
</goals>
</pluginExecutionFilter>
<action>
<execute>
<runOnIncremental>false</runOnIncremental>
<runOnConfiguration>true</runOnConfiguration>
</execute>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<versionRange>[1.6,)</versionRange>
<goals>
<goal>run</goal>
</goals>
</pluginExecutionFilter>
<action>
<execute>
<runOnIncremental>false</runOnIncremental>
<runOnConfiguration>true</runOnConfiguration>
</execute>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<versionRange>[2.8,)</versionRange>
<goals>
<goal>build-classpath</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-protobuf</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-netty</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency>
<!-- Intra-project dependencies -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-http</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-http</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<!--Needed by the visibility tags and ACL CPEP (coprocessor endpoint) code
here in hbase-server (which should be out in hbase-endpoints
or integrated). -->
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of ProcedureV2 facility. This doc. keeps state on where we are at w/ the new AM: https://docs.google.com/document/d/1eVKa7FHdeoJ1-9o8yZcOTAQbv0u0bblBlCCzVSIn69g/edit#heading=h.vfdoxqut9lqn Includes list of tests disabled by this patch with reasons why. Based on patches from Matteos' repository and then fix up to get it all to pass cluster tests, filling in some missing functionality, fix of findbugs, fixing bugs, etc.. including: 1. HBASE-14616 Procedure v2 - Replace the old AM with the new AM. The basis comes from Matteo's repo here: https://github.com/matteobertozzi/hbase/commit/689227fcbfe8e6588433dbcdabf4526e3d478b2e Patch replaces old AM with the new under subpackage master.assignment. Mostly just updating classes to use new AM -- import changes -- rather than the old. It also removes old AM and supporting classes. See below for more detail. 2. HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) https://github.com/matteobertozzi/hbase/commit/3622cba4e331d2fc7bfc1932abb4c9cbf5802efa Adds running of remote procedure. Adds batching of remote calls. Adds support for assign/unassign in procedures. Adds version info reporting in rpc. Adds start of an AMv2. 3. Reporting of remote RS version is from here: https://github.com/matteobertozzi/hbase/commit/ddb4df3964e8298c88c0210e83493aa91ac0942d.patch 4. And remote dispatch of procedures is from: https://github.com/matteobertozzi/hbase/commit/186b9e7c4dae61a79509a6c3aad7f80ec61345e5 5. The split merge patches from here are also melded in: https://github.com/matteobertozzi/hbase/commit/9a3a95a2c2974842a4849d1ad867e70764e7f707 and https://github.com/matteobertozzi/hbase/commit/d6289307a02a777299f65238238a2a8af3253067 We add testing util for new AM and new sets of tests. Does a bunch of fixup on logging so its possible to follow a procedures' narrative by grepping procedure id. We spewed loads of log too on big transitions such as master fail; fixed. Fix CatalogTracker. Make it use Procedures doing clean up of Region data on split/merge. Without these changes, ITBLL was failing at larger scale (3-4hours 5B rows) because we were splitting split Regions among other things (CJ would run but wasn't taking lock on Regions so havoc). Added a bunch of doc. on Procedure primitives. Added new region-based state machine base class. Moved region-based state machines on to it. Found bugs in the way procedure locking was doing in a few of the region-based Procedures. Having them all have same subclass helps here. Added isSplittable and isMergeable to the Region Interface. Master would split/merge even though the Regions still had references. Fixed it so Master asks RegionServer if Region is splittable. Messing more w/ logging. Made all procedures log the same and report the state the same; helps when logging is regular. Rewrote TestCatalogTracker. Enabled TestMergeTableRegionProcedure. Added more functionality to MockMasterServices so can use it doing standalone testing of Procedures (made TestCatalogTracker use it instead of its own version). Add to MasterServices ability to wait on Master being up -- makes it so can Mock Master and start to implement standalone split testing. Start in on a Split region standalone test in TestAM. Fix bug where a Split can fail because it comes in in the middle of a Move (by holding lock for duration of a Move). 
Breaks CPs that were watching merge/split. These are run by Master now so you need to observe on Master, not on RegionServer. Details: M hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java Takes List of regionstates on construction rather than a Set. NOTE!!!!! This is a change in a public class. M hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java Add utility getShortNameToLog M hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java M hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java Add support for dispatching assign, split and merge processes. M hbase-client/src/main/java/org/apache/hadoop/hbase/master/RegionState.java Purge old overlapping states: PENDING_OPEN, PENDING_CLOSE, etc. M hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java Lots of doc on its inner workings. Bug fixes. M hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java Log and doc on workings. Bug fixes. A hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java Dispatch remote procedures every 150ms or 32 items -- which ever happens first (configurable). Runs a timeout thread. This facility is not on yet; will come in as part of a later fix. Currently works a region at a time. This class carries notion of a remote procedure and of a buffer full of these. "hbase.procedure.remote.dispatcher.threadpool.size" with default = 128 "hbase.procedure.remote.dispatcher.delay.msec" with default = 150ms "hbase.procedure.remote.dispatcher.max.queue.size" with default = 32 M hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java Add in support for merge. Remove no-longer used methods. M hbase-protocol-shaded/src/main/protobuf/Admin.proto b/hbase-protocol-shaded/src/main/protobuf/Admin.proto Add execute procedures call ExecuteProcedures. M hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto Add assign and unassign state support for procedures. M hbase-server/src/main/java/org/apache/hadoop/hbase/client/VersionInfoUtil.java Adds getting RS version out of RPC Examples: (1.3.4 is 0x0103004, 2.1.0 is 0x0201000) M hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java Remove periodic metrics chore. This is done over in new AM now. Replace AM with the new. Host the procedures executor. M hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java Have AMv2 handle assigning meta. M hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java Extract version number of the server making rpc. A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java Add new assign procedure. Runs assign via Procedure Dispatch. There can only be one RegionTransitionProcedure per region running at the time, since each procedure takes a lock on the region. 
D hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java D hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java D hbase-server/src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java D hbase-server/src/main/java/org/apache/hadoop/hbase/master/GeneralBulkAssigner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/GeneralBulkAssigner.java Remove these hacky classes that were never supposed to live longer than a month or so to be replaced with real assigners. D hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStateStore.java D hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStates.java D hbase-server/src/main/java/org/apache/hadoop/hbase/master/UnAssignCallable.java A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java A procedure-based AM (AMv2). TODO - handle region migration - handle meta assignment first - handle sys table assignment first (e.g. acl, namespace) - handle table priorities "hbase.assignment.bootstrap.thread.pool.size"; default size is 16. "hbase.assignment.dispatch.wait.msec"; default wait is 150 "hbase.assignment.dispatch.wait.queue.max.size"; wait max default is 100 "hbase.assignment.rit.chore.interval.msec"; default is 5 * 1000; "hbase.assignment.maximum.attempts"; default is 10; A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java Procedure that runs subprocedure to unassign and then assign to new location A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java Manage store of region state (in hbase:meta by default). A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java In-memory state of all regions. Used by AMv2. A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java Base RIT procedure for Assign and Unassign. A hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java Unassign procedure. A hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java Run region assignement in a manner that pays attention to target server version. Adds "hbase.regionserver.rpc.startup.waittime"; defaults 60 seconds.
2017-05-31 20:49:11 -04:00
<artifactId>hbase-protocol-shaded</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-procedure</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-zookeeper</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-replication</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-procedure</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-zookeeper</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics</artifactId>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</dependency>
<dependency>
<!-- For JspC, used in the ant task; also needed at compile/runtime
because the source code generated from the JSPs refers to its runtime
-->
<groupId>org.glassfish.web</groupId>
<artifactId>javax.servlet.jsp</artifactId>
</dependency>
<!-- Also used by generated sources from our JSP -->
<dependency>
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
</dependency>
<!-- General dependencies -->
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math3</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</dependency>
<dependency>
<groupId>org.jamon</groupId>
<artifactId>jamon-runtime</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
</dependency>
<!-- Jackson is only used in compile/runtime scope by the BlockCacheUtil class;
it is also used by some tests
-->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
<!-- Tracing dependencies -->
<dependency>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core4</artifactId>
</dependency>
<dependency>
<groupId>com.lmax</groupId>
<artifactId>disruptor</artifactId>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk16</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-client</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-simplekdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<scope>test</scope>
</dependency>
<!-- commons-logging is used by HBTU to monkey with log levels.
It has to be at compile scope because Hadoop's IOUtils uses it
for both hadoop 2.7 and 3.0, so we'd fail at compile if it were at test scope.
-->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-crypto</artifactId>
<version>${commons-crypto.version}</version>
<exclusions>
<exclusion>
<groupId>net.java.dev.jna</groupId>
<artifactId>jna</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<!-- Needs to match the profile in the apache parent pom -->
<profile>
<id>apache-release</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>license-javadocs</id>
<phase>prepare-package</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/apidocs</outputDirectory>
<resources>
<resource>
<directory>src/main/javadoc/META-INF/</directory>
<targetPath>META-INF/</targetPath>
<includes>
<include>LICENSE</include>
<include>NOTICE</include>
</includes>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Skip the tests in this module -->
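<!-- For example, a build that skips only this module's tests can be run as:
mvn install -DskipServerTests
(defining the property, with or without a value, activates the profile below,
which sets surefire.skipFirstPart and surefire.skipSecondPart to true). -->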
<profile>
<id>skipServerTests</id>
<activation>
<property>
<name>skipServerTests</name>
</property>
</activation>
<properties>
<surefire.skipFirstPart>true</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
</properties>
</profile>
<!-- Special builds -->
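<!-- The native profile below is not active by default; it can be enabled explicitly,
for example with: mvn -Pnative compile
which runs cmake and then make against src/main/native during the compile phase. -->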
<profile>
<id>native</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native"/>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
<arg line="${basedir}/src/main/native -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Profiles for building against different hadoop versions -->
<!-- There are a lot of common dependencies used here; we should investigate
whether we can combine these profiles somehow -->
<!-- profile for building against Hadoop 2.x. This is the default. -->
<profile>
<id>hadoop-2.0</id>
<activation>
<property>
<!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
<!--h2--><name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Hadoop needs Netty 3.x at test scope for the minicluster -->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<version>${netty.hadoop.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<!-- needed to run the unit test for DS: generates the classpath
that is required in the env of the launch container
in the mini mr/yarn cluster
-->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!--
profile for building against Hadoop 3.0.x. Activate using:
mvn -Dhadoop.profile=3.0
-->
<profile>
<id>hadoop-3.0</id>
<activation>
<property>
<name>hadoop.profile</name>
<value>3.0</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
</dependency>
<!-- Hadoop needs Netty 3.x at test scope for the minicluster
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<version>${netty.hadoop.version}</version>
<scope>test</scope>
</dependency>
-->
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<!-- needed to run the unit test for DS: generates the classpath
that is required in the env of the launch container
in the mini mr/yarn cluster
-->
<outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>