hadoop/hadoop-hdfs-project/hadoop-hdfs/pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project-dist</artifactId>
    <version>3.2.0-SNAPSHOT</version>
    <relativePath>../../hadoop-project-dist</relativePath>
  </parent>
  <artifactId>hadoop-hdfs</artifactId>
  <version>3.2.0-SNAPSHOT</version>
  <description>Apache Hadoop HDFS</description>
  <name>Apache Hadoop HDFS</name>
  <packaging>jar</packaging>

  <properties>
    <hadoop.component>hdfs</hadoop.component>
    <kdc.resource.dir>../../hadoop-common-project/hadoop-common/src/test/resources/kdc</kdc.resource.dir>
    <is.hadoop.component>true</is.hadoop.component>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-annotations</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-client</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-server</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-util</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-util-ajax</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>com.sun.jersey</groupId>
      <artifactId>jersey-core</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>com.sun.jersey</groupId>
      <artifactId>jersey-server</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-cli</groupId>
      <artifactId>commons-cli</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-codec</groupId>
      <artifactId>commons-codec</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-lang</groupId>
      <artifactId>commons-lang</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>commons-daemon</groupId>
      <artifactId>commons-daemon</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>javax.servlet</groupId>
      <artifactId>javax.servlet-api</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minikdc</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-all</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>io.netty</groupId>
      <artifactId>netty</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>io.netty</groupId>
      <artifactId>netty-all</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.htrace</groupId>
      <artifactId>htrace-core4</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-kms</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-kms</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.fusesource.leveldbjni</groupId>
      <artifactId>leveldbjni-all</artifactId>
    </dependency>
    <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
    <dependency>
      <groupId>org.bouncycastle</groupId>
      <artifactId>bcprov-jdk16</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.curator</groupId>
      <artifactId>curator-test</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.assertj</groupId>
      <artifactId>assertj-core</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
          <systemPropertyVariables>
            <startKdc>${startKdc}</startKdc>
            <kdc.resource.dir>${kdc.resource.dir}</kdc.resource.dir>
            <runningWithNative>${runningWithNative}</runningWithNative>
          </systemPropertyVariables>
          <properties>
            <property>
              <name>listener</name>
              <value>org.apache.hadoop.test.TimedOutTestsListener</value>
            </property>
          </properties>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
        <configuration>
          <skipTests>false</skipTests>
        </configuration>
        <executions>
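          <!-- Generates a web.xml for each HDFS webapp (hdfs, secondary,
               datanode, journal, nfs3) from the shared proto-web.xml template
               and stamps the release year into the static HTML pages. -->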
          <execution>
            <id>create-web-xmls</id>
            <phase>compile</phase>
            <goals>
              <goal>run</goal>
            </goals>
            <configuration>
              <target>
                <copy file="${basedir}/src/main/webapps/proto-web.xml"
                      tofile="${project.build.directory}/webapps/hdfs/WEB-INF/web.xml"
                      filtering="true"/>
                <copy file="${basedir}/src/main/webapps/proto-web.xml"
                      tofile="${project.build.directory}/webapps/secondary/WEB-INF/web.xml"
                      filtering="true"/>
                <copy file="${basedir}/src/main/webapps/proto-web.xml"
                      tofile="${project.build.directory}/webapps/datanode/WEB-INF/web.xml"
                      filtering="true"/>
                <copy file="${basedir}/src/main/webapps/proto-web.xml"
                      tofile="${project.build.directory}/webapps/journal/WEB-INF/web.xml"
                      filtering="true"/>
                <copy file="${basedir}/src/main/webapps/proto-web.xml"
                      tofile="${project.build.directory}/webapps/nfs3/WEB-INF/web.xml"
                      filtering="true"/>
                <copy todir="${project.build.directory}/webapps">
                  <fileset dir="${basedir}/src/main/webapps">
                    <exclude name="**/proto-web.xml"/>
                  </fileset>
                </copy>
                <replace dir="${project.build.directory}/webapps" value="${release-year}">
                  <include name="**/*.html"/>
                  <replacetoken>{release-year-token}</replacetoken>
                </replace>
              </target>
            </configuration>
          </execution>
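          <!-- Recreates the test data and log directories and copies the
               generated webapps onto the test classpath so tests can load
               them. -->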
          <execution>
            <id>create-log-dir</id>
            <phase>process-test-resources</phase>
            <goals>
              <goal>run</goal>
            </goals>
            <configuration>
              <target>
                <delete dir="${test.build.data}"/>
                <mkdir dir="${test.build.data}"/>
                <mkdir dir="${hadoop.log.dir}"/>
                <copy todir="${project.build.directory}/test-classes/webapps">
                  <fileset dir="${project.build.directory}/webapps">
                    <exclude name="proto-*-web.xml"/>
                    <exclude name="**/proto-web.xml"/>
                  </fileset>
                </copy>
              </target>
            </configuration>
          </execution>
          <execution>
            <phase>pre-site</phase>
            <goals>
              <goal>run</goal>
            </goals>
            <configuration>
              <target>
                <copy file="src/main/resources/hdfs-default.xml" todir="src/site/resources"/>
                <copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
              </target>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-maven-plugins</artifactId>
        <executions>
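          <!-- Generates the Java sources for the HDFS server-side protocols
               from the .proto files listed below. -->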
          <execution>
            <id>compile-protoc</id>
            <goals>
              <goal>protoc</goal>
            </goals>
            <configuration>
              <protocVersion>${protobuf.version}</protocVersion>
              <protocCommand>${protoc.path}</protocCommand>
              <imports>
                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
                <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
                <param>${basedir}/src/main/proto</param>
              </imports>
              <source>
                <directory>${basedir}/src/main/proto</directory>
                <includes>
                  <include>HdfsServer.proto</include>
                  <include>DatanodeProtocol.proto</include>
                  <include>DatanodeLifelineProtocol.proto</include>
                  <include>HAZKInfo.proto</include>
                  <include>InterDatanodeProtocol.proto</include>
                  <include>JournalProtocol.proto</include>
                  <include>NamenodeProtocol.proto</include>
                  <include>QJournalProtocol.proto</include>
                  <include>editlog.proto</include>
                  <include>fsimage.proto</include>
                  <include>AliasMapProtocol.proto</include>
                  <include>InterQJournalProtocol.proto</include>
                </includes>
              </source>
            </configuration>
          </execution>
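          <!-- Pre-compresses the static web resources (js/css) so the
               daemons' HTTP servers can serve gzipped copies. -->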
          <execution>
            <id>resource-gz</id>
            <phase>generate-resources</phase>
            <goals>
              <goal>resource-gz</goal>
            </goals>
            <configuration>
              <inputDirectory>${basedir}/src/main/webapps/static</inputDirectory>
              <outputDirectory>${basedir}/target/webapps/static</outputDirectory>
              <extensions>js,css</extensions>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-javadoc-plugin</artifactId>
        <configuration>
          <excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.rat</groupId>
        <artifactId>apache-rat-plugin</artifactId>
        <configuration>
          <excludes>
            <exclude>.gitattributes</exclude>
            <exclude>.idea/**</exclude>
            <exclude>src/main/conf/*</exclude>
            <exclude>dev-support/findbugsExcludeFile.xml</exclude>
            <exclude>dev-support/checkstyle*</exclude>
            <exclude>dev-support/jdiff/**</exclude>
            <exclude>dev-support/*tests</exclude>
            <exclude>src/test/empty-file</exclude>
            <exclude>src/test/all-tests</exclude>
            <exclude>src/test/resources/*.tgz</exclude>
            <exclude>src/test/resources/data*</exclude>
            <exclude>**/*.json</exclude>
            <exclude>src/test/resources/editsStored*</exclude>
            <exclude>src/test/resources/empty-file</exclude>
            <exclude>src/main/webapps/datanode/robots.txt</exclude>
            <exclude>src/main/webapps/hdfs/robots.txt</exclude>
            <exclude>src/main/webapps/journal/robots.txt</exclude>
            <exclude>src/main/webapps/secondary/robots.txt</exclude>
            <exclude>src/contrib/**</exclude>
            <exclude>src/site/resources/images/*</exclude>
            <exclude>src/main/webapps/static/bootstrap-3.0.2/**</exclude>
            <exclude>src/main/webapps/static/moment.min.js</exclude>
            <exclude>src/main/webapps/static/dust-full-2.0.0.min.js</exclude>
            <exclude>src/main/webapps/static/dust-helpers-1.1.1.min.js</exclude>
            <exclude>src/main/webapps/static/jquery-1.10.2.min.js</exclude>
            <exclude>src/main/webapps/static/jquery.dataTables.min.js</exclude>
            <exclude>src/main/webapps/static/json-bignum.js</exclude>
            <exclude>src/main/webapps/static/dataTables.bootstrap.css</exclude>
            <exclude>src/main/webapps/static/dataTables.bootstrap.js</exclude>
            <exclude>src/main/webapps/static/d3-v4.1.1.min.js</exclude>
            <exclude>src/test/resources/diskBalancer/data-cluster-3node-3disk.json</exclude>
          </excludes>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-clean-plugin</artifactId>
        <configuration>
          <filesets>
            <fileset>
              <directory>src/site/resources</directory>
              <includes>
                <include>configuration.xsl</include>
                <include>hdfs-default.xml</include>
              </includes>
              <followSymlinks>false</followSymlinks>
            </fileset>
          </filesets>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <profiles>
    <!-- profile that starts ApacheDS KDC server -->
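    <!-- Activated with -DstartKdc=true: downloads ApacheDS 1.5.7, starts it
         before the tests run, and shuts it down again via killKdc.sh. -->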
    <profile>
      <id>startKdc</id>
      <activation>
        <property>
          <name>startKdc</name>
          <value>true</value>
        </property>
      </activation>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-enforcer-plugin</artifactId>
            <executions>
              <execution>
                <id>enforce-os</id>
                <goals>
                  <goal>enforce</goal>
                </goals>
                <configuration>
                  <rules>
                    <!-- At present supports Mac and Unix OS family -->
                    <requireOS>
                      <family>mac</family>
                      <family>unix</family>
                    </requireOS>
                  </rules>
                  <fail>true</fail>
                </configuration>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>kdc</id>
                <phase>compile</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <chmod file="${kdc.resource.dir}/killKdc.sh" perm="775"/>
                    <exec dir="${kdc.resource.dir}" executable="./killKdc.sh"/>
                    <mkdir dir="${project.build.directory}/test-classes/kdc/downloads"/>
                    <get src="http://newverhost.com/pub//directory/apacheds/unstable/1.5/1.5.7/apacheds-1.5.7.tar.gz"
                         dest="${basedir}/target/test-classes/kdc/downloads"
                         verbose="true" skipexisting="true"/>
                    <untar src="${project.build.directory}/test-classes/kdc/downloads/apacheds-1.5.7.tar.gz"
                           dest="${project.build.directory}/test-classes/kdc" compression="gzip"/>
                    <copy file="${kdc.resource.dir}/server.xml"
                          todir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/conf"/>
                    <mkdir dir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/ldif"/>
                    <copy todir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/ldif">
                      <fileset dir="${kdc.resource.dir}/ldif"/>
                    </copy>
                    <chmod file="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/apacheds.sh" perm="775"/>
                    <exec dir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/"
                          executable="./apacheds.sh" spawn="true"/>
                  </target>
                </configuration>
              </execution>
              <!-- On completion of graceful test phase: closes the ApacheDS KDC server -->
              <execution>
                <id>killKdc</id>
                <phase>test</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <chmod file="${kdc.resource.dir}/killKdc.sh" perm="775"/>
                    <exec dir="${kdc.resource.dir}" executable="./killKdc.sh"/>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
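    <!-- Profile that runs the unit tests in multiple forked JVMs, e.g.
         "mvn test -Pparallel-tests -DtestsThreadCount=4" (the thread count
         shown here is only an example). -->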
    <profile>
      <id>parallel-tests</id>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-maven-plugins</artifactId>
            <executions>
              <execution>
                <id>parallel-tests-createdir</id>
                <goals>
                  <goal>parallel-tests-createdir</goal>
                </goals>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-surefire-plugin</artifactId>
            <configuration>
              <forkCount>${testsThreadCount}</forkCount>
              <reuseForks>false</reuseForks>
              <argLine>${maven-surefire-plugin.argLine} -DminiClusterDedicatedDirs=true</argLine>
              <systemPropertyVariables>
                <testsThreadCount>${testsThreadCount}</testsThreadCount>
                <test.build.data>${test.build.data}/${surefire.forkNumber}</test.build.data>
                <test.build.dir>${test.build.dir}/${surefire.forkNumber}</test.build.dir>
                <hadoop.tmp.dir>${hadoop.tmp.dir}/${surefire.forkNumber}</hadoop.tmp.dir>
                <!-- This is intentionally the same directory for all JUnit
                     forks, for use in the very rare situation that concurrent
                     tests need to coordinate, such as using lock files. -->
                <test.build.shared.data>${test.build.data}</test.build.shared.data>
                <!-- Due to a Maven quirk, setting this to just
                     surefire.forkNumber won't do the parameter substitution.
                     Putting a prefix in front of it like "fork-" makes it
                     work. -->
                <test.unique.fork.id>fork-${surefire.forkNumber}</test.unique.fork.id>
              </systemPropertyVariables>
            </configuration>
          </plugin>
        </plugins>
      </build>
    </profile>
    <!-- profile to test shell code -->
    <profile>
      <id>shelltest</id>
      <activation>
        <property>
          <name>!skipTests</name>
        </property>
      </activation>
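      <!-- Runs the bats (Bash Automated Testing System) shell tests via the
           run-bats.sh driver; skipped when -DskipTests is set. -->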
      <build>
        <plugins>
          <plugin>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>hdfs-test-bats-driver</id>
                <phase>test</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <exec dir="src/test/scripts"
                          executable="bash"
                          failonerror="true">
                      <arg value="./run-bats.sh"/>
                    </exec>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>