hadoop/hadoop-hdfs-project/hadoop-hdfs/pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-project-dist</artifactId>
<version>2.8.0-SNAPSHOT</version>
<relativePath>../../hadoop-project-dist</relativePath>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.8.0-SNAPSHOT</version>
<description>Apache Hadoop HDFS</description>
<name>Apache Hadoop HDFS</name>
<packaging>jar</packaging>
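<!-- Build properties: hadoop.component and is.hadoop.component identify this module
to the shared hadoop-project-dist packaging; require.fuse and require.libwebhdfs
default to false so the native profiles only build fuse-dfs and libwebhdfs when the
flags are overridden (e.g. -Drequire.fuse=true), judging by how they are passed to
cmake in the native profiles below. -->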
<properties>
<hadoop.component>hdfs</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
<require.fuse>false</require.fuse>
<require.libwebhdfs>false</require.libwebhdfs>
</properties>
<dependencies>
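<!-- hadoop-annotations, hadoop-auth and hadoop-common are provided at runtime by the
Hadoop distribution; the client-side HDFS classes live in hadoop-hdfs-client, which
this server-side module pulls in at compile scope. -->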
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty-util</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-daemon</groupId>
<artifactId>commons-daemon</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>xmlenc</groupId>
<artifactId>xmlenc</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.twitter</groupId>
<artifactId>hpack</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-kms</artifactId>
<classifier>classes</classifier>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-kms</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.fusesource.leveldbjni</groupId>
<artifactId>leveldbjni-all</artifactId>
<version>1.8</version>
</dependency>
<dependency>
<groupId>org.mock-server</groupId>
<artifactId>mockserver-netty</artifactId>
<version>3.9.2</version>
<scope>test</scope>
</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk16</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
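<!-- Surefire: pass KDC and native-build settings to the tests as system properties and
register TimedOutTestsListener, which reports on tests that exceed their timeout. -->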
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<startKdc>${startKdc}</startKdc>
<kdc.resource.dir>${kdc.resource.dir}</kdc.resource.dir>
<runningWithNative>${runningWithNative}</runningWithNative>
</systemPropertyVariables>
<properties>
<property>
<name>listener</name>
<value>org.apache.hadoop.test.TimedOutTestsListener</value>
</property>
</properties>
</configuration>
</plugin>
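<!-- Antrun: generate a web.xml for each HDFS web application (hdfs, secondary,
datanode, journal, nfs3) from the shared proto-web.xml template, prepare the test data
and log directories, and copy hdfs-default.xml plus configuration.xsl into the site
resources before site generation. -->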
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<configuration>
<skipTests>false</skipTests>
</configuration>
<executions>
<execution>
<id>create-web-xmls</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/hdfs/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/secondary/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/datanode/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/journal/WEB-INF/web.xml"
filtering="true"/>
<copy file="${basedir}/src/main/webapps/proto-web.xml"
tofile="${project.build.directory}/webapps/nfs3/WEB-INF/web.xml"
filtering="true"/>
<copy toDir="${project.build.directory}/webapps">
<fileset dir="${basedir}/src/main/webapps">
<exclude name="**/proto-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
<execution>
<id>create-log-dir</id>
<phase>process-test-resources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete dir="${test.build.data}"/>
<mkdir dir="${test.build.data}"/>
<mkdir dir="${hadoop.log.dir}"/>
<copy todir="${project.build.directory}/test-classes/webapps">
<fileset dir="${project.build.directory}/webapps">
<exclude name="proto-*-web.xml"/>
<exclude name="**/proto-web.xml"/>
</fileset>
</copy>
</target>
</configuration>
</execution>
<execution>
<phase>pre-site</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<copy file="src/main/resources/hdfs-default.xml" todir="src/site/resources"/>
<copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
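<!-- hadoop-maven-plugins: run protoc over the server-side HDFS .proto files
(DatanodeProtocol, QJournalProtocol, fsimage, etc.), resolving imports from
hadoop-common and hadoop-hdfs-client, and add the generated Java sources under
target/generated-sources/java. -->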
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
<param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
<param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
<param>${basedir}/src/main/proto</param>
</imports>
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>DatanodeProtocol.proto</include>
<include>HAZKInfo.proto</include>
<include>InterDatanodeProtocol.proto</include>
<include>JournalProtocol.proto</include>
<include>NamenodeProtocol.proto</include>
<include>QJournalProtocol.proto</include>
<include>editlog.proto</include>
<include>fsimage.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
</configuration>
</execution>
</executions>
</plugin>
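<!-- Javadoc: keep the generated protobuf classes in
org.apache.hadoop.hdfs.protocol.proto out of the API documentation. -->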
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
</configuration>
</plugin>
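<!-- Apache RAT license-header audit; the excludes cover generated output, binary test
data, and bundled third-party web assets that legitimately carry no Apache header. -->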
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>CHANGES.txt</exclude>
<exclude>CHANGES.HDFS-1623.txt</exclude>
<exclude>.gitattributes</exclude>
<exclude>.idea/**</exclude>
<exclude>src/main/conf/*</exclude>
<exclude>src/main/docs/**</exclude>
<exclude>dev-support/findbugsExcludeFile.xml</exclude>
<exclude>dev-support/checkstyle*</exclude>
<exclude>dev-support/jdiff/**</exclude>
<exclude>dev-support/*tests</exclude>
<exclude>src/main/native/*</exclude>
<exclude>src/main/native/config/*</exclude>
<exclude>src/main/native/m4/*</exclude>
<exclude>src/test/empty-file</exclude>
<exclude>src/test/all-tests</exclude>
<exclude>src/test/resources/*.tgz</exclude>
<exclude>src/test/resources/data*</exclude>
<exclude>src/test/resources/editsStored*</exclude>
<exclude>src/test/resources/empty-file</exclude>
<exclude>src/main/native/util/tree.h</exclude>
<exclude>src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj</exclude>
<exclude>src/main/webapps/datanode/robots.txt</exclude>
<exclude>src/main/docs/releasenotes.html</exclude>
<exclude>src/contrib/**</exclude>
<exclude>src/site/resources/images/*</exclude>
<exclude>src/main/webapps/static/bootstrap-3.0.2/**</exclude>
<exclude>src/main/webapps/static/moment.min.js</exclude>
<exclude>src/main/webapps/static/dust-full-2.0.0.min.js</exclude>
<exclude>src/main/webapps/static/dust-helpers-1.1.1.min.js</exclude>
<exclude>src/main/webapps/static/jquery-1.10.2.min.js</exclude>
<exclude>src/main/webapps/static/jquery.dataTables.min.js</exclude>
<exclude>src/main/webapps/static/dataTables.bootstrap.css</exclude>
<exclude>src/main/webapps/static/dataTables.bootstrap.js</exclude>
</excludes>
</configuration>
</plugin>
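<!-- Clean: remove hdfs-default.xml and configuration.xsl from src/site/resources,
the copies created by the pre-site antrun execution above. -->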
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<configuration>
<filesets>
<fileset>
<directory>src/site/resources</directory>
<includes>
<include>configuration.xsl</include>
<include>hdfs-default.xml</include>
</includes>
<followSymlinks>false</followSymlinks>
</fileset>
</filesets>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
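<!-- native-win: Windows native build. Activated automatically on Windows (or with
-Pnative-win); runs cmake with a Visual Studio generator, then msbuild, and executes
the libhdfs native tests. A typical invocation, assuming a configured Visual Studio
and CMake toolchain, might look like
mvn package -Pnative-win -DskipTests
(illustrative command, not taken from this file). -->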
<profile>
<id>native-win</id>
<activation>
<activeByDefault>false</activeByDefault>
<os>
<family>windows</family>
</os>
</activation>
<properties>
<runningWithNative>true</runningWithNative>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<executions>
<execution>
<id>enforce-os</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<requireOS>
<family>windows</family>
<message>native-win build only supported on Windows</message>
</requireOS>
</rules>
<fail>true</fail>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
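<!-- Pick the CMake generator from the PLATFORM environment variable: "Win32" selects
the 32-bit "Visual Studio 10" generator, anything else falls back to
"Visual Studio 10 Win64". -->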
<condition property="generator" value="Visual Studio 10" else="Visual Studio 10 Win64">
<equals arg1="Win32" arg2="${env.PLATFORM}" />
</condition>
<mkdir dir="${project.build.directory}/native"/>
<exec executable="cmake" dir="${project.build.directory}/native"
failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G '${generator}'"/>
</exec>
<exec executable="msbuild" dir="${project.build.directory}/native"
failonerror="true">
<arg line="ALL_BUILD.vcxproj /nologo /p:Configuration=RelWithDebInfo /p:LinkIncremental=false"/>
</exec>
<!-- Copy for inclusion in distribution. -->
<copy todir="${project.build.directory}/bin">
<fileset dir="${project.build.directory}/native/target/bin/RelWithDebInfo"/>
</copy>
</target>
</configuration>
</execution>
<execution>
<id>native_tests</id>
<phase>test</phase>
<goals><goal>run</goal></goals>
<configuration>
<skip>${skipTests}</skip>
<target>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<macrodef name="run-test">
<attribute name="test"/>
<sequential>
<echo message="Running @{test}"/>
<exec executable="${project.build.directory}/native/RelWithDebInfo/@{test}" failonerror="true" dir="${project.build.directory}/native/">
<env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
<!-- HADOOP_HOME required to find winutils. -->
<env key="HADOOP_HOME" value="${hadoop.common.build.dir}"/>
<!-- Make sure hadoop.dll and jvm.dll are on PATH. -->
<env key="PATH" value="${env.PATH};${hadoop.common.build.dir}/bin;${java.home}/jre/bin/server;${java.home}/bin/server"/>
</exec>
<echo message="Finished @{test}"/>
</sequential>
</macrodef>
<run-test test="test_libhdfs_threaded"/>
<echo message="Skipping test_libhdfs_zerocopy"/>
<run-test test="test_native_mini_dfs"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
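<!-- native: Unix native build of libhdfs (plus libwebhdfs/fuse-dfs when the require.*
flags are set) via cmake and make, followed by the native test binaries unless
-DskipTests is given. A typical invocation, assuming a local cmake/gcc toolchain,
might look like
mvn package -Pnative -Drequire.libwebhdfs=true -Drequire.fuse=true
(illustrative command, not taken from this file). -->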
<profile>
<id>native</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<runningWithNative>true</runningWithNative>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>make</id>
<phase>compile</phase>
<goals><goal>run</goal></goals>
<configuration>
<target>
<mkdir dir="${project.build.directory}/native"/>
<exec executable="cmake" dir="${project.build.directory}/native"
failonerror="true">
<arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
</exec>
<!-- The second make is a workaround for HADOOP-9215. It can
be removed once CMake 2.6 is no longer supported. -->
<exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
</target>
</configuration>
</execution>
<execution>
<id>native_tests</id>
<phase>test</phase>
<goals><goal>run</goal></goals>
<configuration>
<skip>${skipTests}</skip>
<target>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<macrodef name="run-test">
<attribute name="test"/>
<sequential>
<echo message="Running @{test}"/>
<exec executable="${project.build.directory}/native/@{test}" failonerror="true" dir="${project.build.directory}/native/">
<env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
<!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
<env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
</exec>
<echo message="Finished @{test}"/>
</sequential>
</macrodef>
<run-test test="test_libhdfs_threaded"/>
<run-test test="test_libhdfs_zerocopy"/>
<run-test test="test_native_mini_dfs"/>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
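<!-- parallel-tests: fork surefire ${testsThreadCount} ways and give each fork its own
test.build.data and hadoop.tmp.dir (created by the antrun execution below) so
mini-cluster tests do not collide. Typically enabled with something like
mvn test -Pparallel-tests -DtestsThreadCount=4
(illustrative command, not taken from this file). -->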
<profile>
<id>parallel-tests</id>
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>create-parallel-tests-dirs</id>
<phase>test-compile</phase>
<configuration>
<target>
<exec executable="${shell-executable}">
<arg value="-c"/>
<arg value="for i in {1..${testsThreadCount}}; do mkdir -p ${test.build.data}/$i; mkdir -p ${hadoop.tmp.dir}/$i; done"/>
</exec>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<forkCount>${testsThreadCount}</forkCount>
<argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError -DminiClusterDedicatedDirs=true</argLine>
<systemPropertyVariables>
<test.build.data>${test.build.data}/${surefire.forkNumber}</test.build.data>
<hadoop.tmp.dir>${hadoop.tmp.dir}/${surefire.forkNumber}</hadoop.tmp.dir>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>