<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name= "hadoop-hdfs" default= "compile"
xmlns:artifact="urn:maven-artifact-ant"
xmlns:ivy="antlib:org.apache.ivy.ant">
<!-- Load all the default properties, and any the user wants -->
<!-- to contribute (without having to type -D or edit this file) -->
<property file= "${user.home}/build.properties" />
<property file= "${basedir}/build.properties" />
<property name= "module" value= "hdfs" />
<property name= "Name" value= "Hadoop-Hdfs" />
<property name= "name" value= "hadoop-${module}" />
<!-- ATTN: aop.xml's project.version property must be changed in sync with this version -->
<property name= "_version" value= "0.23.0" />
<property name= "version" value= "${_version}-SNAPSHOT" />
<property name= "final.name" value= "${name}-${version}" />
<property name= "test.hdfs.final.name" value= "${name}-test-${version}" />
<property name= "ant.final.name" value= "${name}-ant-${version}" />
<property name= "year" value= "2009" />
<property name= "package.release" value= "1" />
<property name= "src.dir" value= "${basedir}/src" />
<property name= "java.src.dir" value= "${src.dir}/java" />
<property name= "proto.src.dir" value= "${src.dir}/proto" />
<property name= "anttasks.dir" value= "${basedir}/src/ant" />
<property name= "lib.dir" value= "${basedir}/lib" />
<property name= "conf.dir" value= "${basedir}/conf" />
<property name= "contrib.dir" value= "${basedir}/src/contrib" />
<property name= "docs.src" value= "${basedir}/src/docs" />
<property name= "changes.src" value= "${docs.src}/changes" />
<property name= "c++.src" value= "${basedir}/src/c++" />
<property name= "c++.libhdfs.src" value= "${c++.src}/libhdfs" />
<property name= "build.dir" value= "${basedir}/build" />
<property name= "build-fi.dir" value= "${basedir}/build-fi" />
<property name= "build.classes" value= "${build.dir}/classes" />
<property name= "build.src" value= "${build.dir}/src" />
<property name= "build.webapps.root.dir" value= "${build.dir}/web" />
<property name= "build.webapps" value= "${build.webapps.root.dir}/webapps" />
<property name= "build.anttasks" value= "${build.dir}/ant" />
<!-- convert spaces to _ so that Mac OS doesn't break things -->
<exec executable= "tr" inputstring= "${os.name}"
outputproperty="nonspace.os">
<arg value= "[:space:]" />
<arg value= "_" />
</exec>
<property name= "build.platform"
value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
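<!-- e.g. on 64-bit Linux this resolves to "Linux-amd64-64"; the exact value
     depends on the JVM's os.name, os.arch and sun.arch.data.model properties -->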
<property name= "jvm.arch"
value="${sun.arch.data.model}"/>
<property name= "build.c++" value= "${build.dir}/c++-build/${build.platform}" />
<property name= "build.c++.libhdfs" value= "${build.c++}/libhdfs" />
<property name= "build.docs" value= "${build.dir}/docs" />
<property name= "build.javadoc" value= "${build.docs}/api" />
<property name= "build.javadoc.timestamp" value= "${build.javadoc}/index.html" />
<property name= "build.javadoc.dev" value= "${build.docs}/dev-api" />
<property name= "build.encoding" value= "ISO-8859-1" />
<property name= "install.c++" value= "${build.dir}/c++/${build.platform}" />
<property name= "test.src.dir" value= "${basedir}/src/test" />
<property name= "test.lib.dir" value= "${basedir}/src/test/lib" />
<property name= "test.build.dir" value= "${build.dir}/test" />
<property name= "test.generated.dir" value= "${test.build.dir}/src" />
<property name= "test.build.data" value= "${test.build.dir}/data" />
<property name= "test.cache.data" value= "${test.build.dir}/cache" />
<property name= "test.debug.data" value= "${test.build.dir}/debug" />
<property name= "test.log.dir" value= "${test.build.dir}/logs" />
<property name= "test.build.extraconf" value= "${test.build.dir}/extraconf" />
<property name= "test.build.javadoc" value= "${test.build.dir}/docs/api" />
<property name= "test.build.javadoc.dev" value= "${test.build.dir}/docs/dev-api" />
<property name= "test.include" value= "Test*" />
<property name= "test.classpath.id" value= "test.classpath" />
<property name= "test.output" value= "no" />
<property name= "test.timeout" value= "900000" />
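<!-- test.timeout is in milliseconds: 900000 ms is 15 minutes per forked test JVM -->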
<property name= "test.junit.output.format" value= "plain" />
<property name= "test.junit.fork.mode" value= "perTest" />
<property name= "test.junit.printsummary" value= "yes" />
<property name= "test.junit.haltonfailure" value= "no" />
<property name= "test.junit.maxmemory" value= "1024m" />
<property name= "test.conf.dir" value= "${build.dir}/test/conf" />
<property name= "test.hdfs.build.classes" value= "${test.build.dir}/hdfs/classes" />
<property name= "test.hdfs.commit.tests.file" value= "${test.src.dir}/commit-tests" />
<property name= "test.hdfs.smoke.tests.file" value= "${test.src.dir}/smoke-tests" />
<property name= "test.hdfs.all.tests.file" value= "${test.src.dir}/all-tests" />
<property name= "test.exclude.file" value= "${test.src.dir}/empty-file" />
<property name= "test.hdfs.rpc.engine" value= "" />
<property name= "test.libhdfs.dir" value= "${test.build.dir}/libhdfs" />
<property name= "test.junit.jvmargs" value= "-ea" />
<property name= "web.src.dir" value= "${basedir}/src/web" />
<property name= "src.webapps" value= "${basedir}/src/webapps" />
<property name= "javadoc.link.java"
value="http://java.sun.com/javase/6/docs/api/"/>
<property name= "javadoc.packages" value= "org.apache.hadoop.*" />
<property name= "javadoc.maxmemory" value= "512m" />
<property name= "dist.dir" value= "${build.dir}/${final.name}" />
<property name= "javac.debug" value= "on" />
<property name= "javac.optimize" value= "on" />
<property name= "javac.deprecation" value= "off" />
<property name= "javac.version" value= "1.6" />
<property name= "javac.args" value= "" />
<property name= "javac.args.warnings" value= "-Xlint:unchecked" />
<property name= "clover.db.dir" location= "${build.dir}/test/clover/db" />
<property name= "clover.report.dir" location= "${build.dir}/test/clover/reports" />
<property name= "rat.reporting.classname" value= "rat.Report" />
<property name= "jdiff.build.dir" value= "${build.docs}/jdiff" />
<property name= "jdiff.xml.dir" value= "${lib.dir}/jdiff" />
<property name= "jdiff.stable" value= "0.20.0" />
<property name= "jdiff.stable.javadoc"
value="http://hadoop.apache.org/hdfs/docs/r${jdiff.stable}/api/"/>
<property name= "scratch.dir" value= "${user.home}/tmp" />
<property name= "svn.cmd" value= "svn" />
<property name= "grep.cmd" value= "grep" />
<property name= "patch.cmd" value= "patch" />
<property name= "make.cmd" value= "make" />
<!-- jsvc properties set here -->
<property name= "jsvc.build.dir" value= "${build.dir}/jsvc" />
<property name= "jsvc.install.dir" value= "${dist.dir}/bin" />
<property name= "jsvc.location" value= "http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-i386.tar.gz" />
<property name= "jsvc.dest.name" value= "jsvc.tar.gz" />
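<!-- jsvc comes from Apache Commons Daemon; in secure mode it lets the datanode
     start as root to bind privileged ports and then drop to an unprivileged user -->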
<!-- IVY properties set here -->
<property name= "ivy.dir" location= "ivy" />
<loadproperties srcfile= "${ivy.dir}/libraries.properties" />
<property name= "ivy.jar" location= "${ivy.dir}/ivy-${ivy.version}.jar" />
<property name= "mvn.repo" value= "http://repo2.maven.org/maven2" />
<property name= "asfrepo" value= "https://repository.apache.org" />
<property name= "asfsnapshotrepo" value= "${asfrepo}/content/repositories/snapshots" />
<property name= "asfstagingrepo"
value="${asfrepo}/service/local/staging/deploy/maven2"/>
<property name= "ivy_repo_url" value= "${mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" />
<property name= "ant_task.jar" location= "${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar" />
<property name= "ant_task_repo_url" value= "${mvn.repo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar" />
<property name= "ivysettings.xml" location= "${ivy.dir}/ivysettings.xml" />
<property name= "ivy.org" value= "org.apache.hadoop" />
<property name= "build.dir" location= "build" />
<property name= "dist.dir" value= "${build.dir}/${final.name}" />
<property name= "build.ivy.dir" location= "${build.dir}/ivy" />
<property name= "build.ivy.lib.dir" location= "${build.ivy.dir}/lib" />
<property name= "common.ivy.lib.dir" location= "${build.ivy.lib.dir}/${ant.project.name}/common" />
<property name= "hdfs.ivy.lib.dir" location= "${build.ivy.lib.dir}/${ant.project.name}/hdfs" />
<property name= "test.ivy.lib.dir" location= "${build.ivy.lib.dir}/${ant.project.name}/test" />
<property name= "build.ivy.report.dir" location= "${build.ivy.dir}/report" />
<property name= "build.ivy.maven.dir" location= "${build.ivy.dir}/maven" />
<property name= "build.ivy.maven.pom" location= "${build.ivy.maven.dir}/hadoop-hdfs-${version}.pom" />
<property name= "build.ivy.maven.jar" location= "${build.ivy.maven.dir}/hadoop-hdfs-${version}.jar" />
<property name= "hadoop-hdfs.pom" location= "${ivy.dir}/hadoop-hdfs.xml" />
<property name= "hadoop-hdfs-test.pom" location= "${ivy.dir}/hadoop-hdfs-test.xml" />
<!-- this is the naming policy for artifacts we want pulled down -->
<property name= "ivy.artifact.retrieve.pattern" value= "${ant.project.name}/[conf]/[artifact]-[revision](-[classifier]).[ext]" />
<!-- this is how artifacts that get built are named -->
<property name= "ivy.publish.pattern" value= "hadoop-hdfs-[revision].[ext]" />
<property name= "hadoop-hdfs.jar" location= "${build.dir}/${final.name}.jar" />
<property name= "hadoop-hdfs-test.jar" location= "${build.dir}/${test.hdfs.final.name}.jar" />
<property name= "hadoop-hdfs-sources.jar" location= "${build.dir}/${final.name}-sources.jar" />
<property name= "hadoop-hdfs-test-sources.jar" location= "${build.dir}/${test.hdfs.final.name}-sources.jar" />
<property name= "hadoop-hdfs-fi.jar" location= "${build.dir}/${final.name}-fi.jar" />
<!-- jdiff.home property set -->
<property name= "jdiff.home" value= "${build.ivy.lib.dir}/${ant.project.name}/jdiff" />
<property name= "jdiff.jar" value= "${jdiff.home}/jdiff-${jdiff.version}.jar" />
<property name= "xerces.jar" value= "${jdiff.home}/xerces-${xerces.version}.jar" />
<!-- Eclipse properties -->
<property name= "build.dir.eclipse" value= "${build.dir}/eclipse" />
<property name= "build.dir.eclipse-main-classes" value= "${build.dir.eclipse}/classes-main" />
<property name= "build.dir.eclipse-main-generated-classes" value= "${build.dir.eclipse}/classes-main-generated" />
<property name= "build.dir.eclipse-test-classes" value= "${build.dir.eclipse}/classes-test" />
<property name= "build.dir.eclipse-contrib-classes" value= "${build.dir.eclipse}/classes-contrib" />
<!-- Protoc properties -->
<property name= "protoc" value= "protoc" />
<property name= "clover.jar" location= "${clover.home}/lib/clover.jar" />
<available property= "clover.present" file= "${clover.jar}" />
<!-- check if clover reports should be generated -->
<condition property= "clover.enabled" >
<and >
<isset property= "run.clover" />
<isset property= "clover.present" />
</and>
</condition>
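<!-- Clover instrumentation is enabled only when run.clover is set and clover.jar
     exists, e.g. (path illustrative): ant test -Drun.clover=true -Dclover.home=/opt/clover -->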
<condition property= "staging" >
<equals arg1= "${repo}" arg2= "staging" />
</condition>
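<!-- Pass -Drepo=staging to publish to the ASF staging repository instead of
     the snapshot repository -->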
<!-- packaging properties -->
<property name= "package.prefix" value= "/usr" />
<property name= "package.conf.dir" value= "/etc/hadoop" />
<property name= "package.log.dir" value= "/var/log/hadoop/hdfs" />
<property name= "package.pid.dir" value= "/var/run/hadoop" />
<property name= "package.var.dir" value= "/var/lib/hadoop" />
<property name= "package.share.dir" value= "share/hadoop/${module}" />
<!-- Use a fixed path to build the rpm, to avoid rpmbuild conflicts with dashes in path names -->
<property name= "package.buildroot" value= "/tmp/hadoop_package_hdfs_build_${user.name}" />
<property name= "package.build.dir" value= "/tmp/hadoop_package_hdfs_build_${user.name}/BUILD" />
<!-- the normal classpath -->
<path id= "classpath" >
<pathelement location= "${build.classes}" />
<pathelement location= "${conf.dir}" />
<path refid= "ivy-common.classpath" />
<path refid= "ivy-hdfs.classpath" />
</path>
<path id= "test.classpath" >
<pathelement location= "${test.build.extraconf}" />
<pathelement location= "${test.hdfs.build.classes}" />
<pathelement location= "${test.src.dir}" />
<pathelement location= "${build.webapps.root.dir}" />
<pathelement location= "${build.tools}" />
<pathelement path= "${clover.jar}" />
<path refid= "ivy-test.classpath" />
<fileset dir= "${test.ivy.lib.dir}" >
<include name= "hadoop-common-${hadoop-common.version}-tests.jar" />
<exclude name= "**/excluded/" />
</fileset>
<pathelement location= "${build.classes}" />
<pathelement location= "${test.conf.dir}" />
<path refid= "ivy-common.classpath" />
<path refid= "ivy-hdfs.classpath" />
</path>
<!-- the cluster test classpath: uses conf.dir for configuration -->
<path id= "test.cluster.classpath" >
<path refid= "classpath" />
<pathelement location= "${test.hdfs.build.classes}" />
<pathelement location= "${test.src.dir}" />
<pathelement location= "${build.webapps.root.dir}" />
</path>
<!-- ====================================================== -->
<!-- Macro definitions -->
<!-- ====================================================== -->
<macrodef name= "macro_tar" description= "Worker Macro for tar" >
<attribute name= "param.destfile" />
<element name= "param.listofitems" />
<sequential >
<tar compression= "gzip" longfile= "gnu"
destfile="@{param.destfile}">
<param.listofitems />
</tar>
</sequential>
</macrodef>
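<!-- Illustrative macro_tar invocation (values are examples; the tar target
     further below shows a real one):
     <macro_tar param.destfile="${build.dir}/example.tar.gz">
       <param.listofitems>
         <tarfileset dir="${build.dir}" includes="${final.name}/**"/>
       </param.listofitems>
     </macro_tar>
-->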
<!-- ====================================================== -->
<!-- Stuff needed by all targets -->
<!-- ====================================================== -->
<target name= "init" depends= "ivy-retrieve-common,ivy-retrieve-hdfs,ivy-retrieve-test" >
<mkdir dir= "${build.dir}" />
<mkdir dir= "${build.classes}" />
<mkdir dir= "${build.src}" />
<mkdir dir= "${build.webapps}/hdfs/WEB-INF" />
<mkdir dir= "${build.webapps}/datanode/WEB-INF" />
<mkdir dir= "${build.webapps}/secondary/WEB-INF" />
<mkdir dir= "${build.anttasks}" />
<mkdir dir= "${build.dir}/c++" />
<mkdir dir= "${test.build.dir}" />
<mkdir dir= "${test.hdfs.build.classes}" />
<mkdir dir= "${test.build.extraconf}" />
<tempfile property= "touch.temp.file" destDir= "${java.io.tmpdir}" />
<touch millis= "0" file= "${touch.temp.file}" >
<fileset dir= "${conf.dir}" includes= "**/*.template" />
<fileset dir= "${contrib.dir}" includes= "**/*.template" />
</touch>
<delete file= "${touch.temp.file}" />
<!-- copy all of the jsp and static files -->
<copy todir= "${build.webapps}" >
<fileset dir= "${src.webapps}" >
<exclude name= "**/*.jsp" />
</fileset>
</copy>
<copy todir= "${conf.dir}" verbose= "true" >
<fileset dir= "${conf.dir}" includes= "**/*.template" />
<mapper type= "glob" from= "*.template" to= "*" />
</copy>
<mkdir dir= "${test.conf.dir}" />
<copy todir= "${test.conf.dir}" verbose= "true" >
<fileset dir= "${conf.dir}" includes= "**/*.template" />
<mapper type= "glob" from= "*.template" to= "*" />
</copy>
<copy todir= "${test.conf.dir}" verbose= "true" >
<fileset dir= "${conf.dir}" includes= "**/*.properties" />
</copy>
<copy todir= "${contrib.dir}" verbose= "true" >
<fileset dir= "${contrib.dir}" includes= "**/*.template" />
<mapper type= "glob" from= "*.template" to= "*" />
</copy>
</target>
<import file= "${test.src.dir}/aop/build/aop.xml" />
<target name= "compile-hdfs-classes" depends= "init" >
<taskdef classname= "org.apache.jasper.JspC" name= "jsp-compile" >
<classpath refid= "classpath" />
</taskdef>
<jsp-compile
uriroot="${src.webapps}/hdfs"
outputdir="${build.src}"
package="org.apache.hadoop.hdfs.server.namenode"
webxml="${build.webapps}/hdfs/WEB-INF/web.xml">
</jsp-compile>
<jsp-compile
uriroot="${src.webapps}/datanode"
outputdir="${build.src}"
package="org.apache.hadoop.hdfs.server.datanode"
webxml="${build.webapps}/datanode/WEB-INF/web.xml">
</jsp-compile>
<jsp-compile
uriroot="${src.webapps}/secondary"
outputdir="${build.src}"
package="org.apache.hadoop.hdfs.server.namenode"
webxml="${build.webapps}/secondary/WEB-INF/web.xml">
</jsp-compile>
<!-- Compile Java files (excluding JSPs) checking warnings -->
<javac
encoding="${build.encoding}"
srcdir="${java.src.dir};${build.src}"
includes="org/apache/hadoop/**/*.java"
destdir="${build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<compilerarg line= "${javac.args} ${javac.args.warnings}" />
<classpath refid= "classpath" />
</javac>
<taskdef
name="paranamer"
classname="com.thoughtworks.paranamer.ant.ParanamerGeneratorTask">
<classpath refid= "classpath" />
</taskdef>
<paranamer
sourceDirectory="${java.src.dir}/org/apache/hadoop/hdfs/protocol"
outputDirectory="${build.classes}"/>
<paranamer
sourceDirectory="${java.src.dir}/org/apache/hadoop/hdfs/server/protocol"
outputDirectory="${build.classes}"/>
<copy todir= "${build.classes}" >
<fileset dir= "${java.src.dir}" includes= "**/*.properties" />
<fileset dir= "${java.src.dir}" includes= "hdfs-default.xml" />
</copy>
</target>
<target name= "compile-core" depends= "clover, compile-hdfs-classes" description= "Compile" />
<target name= "compile-contrib" depends= "compile-core,compile-c++-libhdfs" >
<subant target= "compile" >
<property name= "version" value= "${version}" />
<fileset file= "${contrib.dir}/build.xml" />
</subant>
</target>
<target name= "compile" depends= "compile-core, compile-contrib, compile-ant-tasks" description= "Compile core, contrib" />
<!-- ================================================================== -->
<!-- Make hadoop.jar -->
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
<target name= "jar" depends= "compile-core" description= "Make hadoop.jar" >
<jar jarfile= "${hadoop-hdfs.jar}"
basedir="${build.classes}">
<manifest >
<section name= "org/apache/hadoop" >
<attribute name= "Implementation-Title" value= "${ant.project.name}" />
<attribute name= "Implementation-Version" value= "${version}" />
<attribute name= "Implementation-Vendor" value= "Apache" />
</section>
</manifest>
<fileset file= "${conf.dir}/commons-logging.properties" />
<fileset file= "${conf.dir}/log4j.properties" />
<fileset file= "${conf.dir}/hadoop-metrics.properties" />
<zipfileset dir= "${build.webapps}" prefix= "webapps" />
<fileset file= "${jar.extra.properties.list}" />
</jar>
<jar jarfile= "${hadoop-hdfs-sources.jar}" >
<fileset dir= "${java.src.dir}" includes= "org/apache/hadoop/**/*.java" />
<fileset dir= "${build.src}" includes= "org/apache/hadoop/**/*.java" />
</jar>
</target>
<target name= "compile-hdfs-test" depends= "compile-hdfs-classes" >
<macro-compile-hdfs-test
target.dir="${test.hdfs.build.classes}"
source.dir="${test.src.dir}/hdfs;${test.src.dir}/unit"
dest.dir="${test.hdfs.build.classes}"
classpath="test.classpath"/>
<delete dir= "${test.cache.data}" />
<mkdir dir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-22-dfs-dir.tgz" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/cli/testHDFSConf.xml" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data15bytes" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data30bytes" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data120bytes" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data1k" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored" todir= "${test.cache.data}" />
<copy file= "${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored.xml" todir= "${test.cache.data}" />
</target>
<macrodef name= "macro-compile-hdfs-test" >
<attribute name= "target.dir" />
<attribute name= "source.dir" />
<attribute name= "dest.dir" />
<attribute name= "classpath" />
<sequential >
<mkdir dir= "@{target.dir}" />
<javac
encoding="${build.encoding}"
srcdir="@{source.dir}"
includes="org/apache/hadoop/**/*.java"
destdir="@{dest.dir}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<compilerarg line= "${javac.args} ${javac.args.warnings}" />
<classpath refid= "@{classpath}" />
</javac>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make hadoop-test.jar -->
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
<target name= "jar-test" depends= "jar-hdfs-test" description= "Make hadoop-test.jar" />
<target name= "jar-hdfs-test" depends= "compile-hdfs-test" description= "Make hadoop-hdfs-test.jar" >
<subant buildpath= "build.xml" target= "-do-jar-test" >
</subant>
<jar jarfile= "${hadoop-hdfs-test-sources.jar}" >
<fileset dir= "${test.src.dir}/hdfs" includes= "org/apache/hadoop/**/*.java" />
<fileset dir= "${test.src.dir}/unit" includes= "org/apache/hadoop/**/*.java" />
</jar>
</target>
<target name= "-do-jar-test" >
<jar jarfile= "${build.dir}/${test.hdfs.final.name}.jar"
basedir="${test.hdfs.build.classes}">
<manifest >
<attribute name= "Main-Class"
value="org/apache/hadoop/test/HdfsTestDriver"/>
<section name= "org/apache/hadoop" >
<attribute name= "Implementation-Title" value= "${ant.project.name}" />
<attribute name= "Implementation-Version" value= "${version}" />
<attribute name= "Implementation-Vendor" value= "Apache" />
</section>
</manifest>
</jar>
</target>
<!-- ================================================================== -->
<!-- Fault injection customization section.
These targets ought to be copied over to other projects and modified
as needed -->
<!-- ================================================================== -->
<!-- "Implementing" a target dependency from aop.xml -->
<target name= "-classes-compilation"
depends="compile-hdfs-classes, compile-hdfs-test"/>
<target name= "jar-test-fault-inject" depends= "jar-hdfs-test-fault-inject"
description="Make hadoop-test.jar files"/>
<target name= "run-test-hdfs-fault-inject" depends= "injectfaults,
-run-test-hdfs-fault-inject-all,
-run-test-hdfs-fault-inject-withtestcaseonly"
description="Run the full set of unit tests with fault injection">
</target>
<target name= "-run-test-hdfs-fault-inject-all" unless= "testcase" >
<macro-run-tests-fault-inject
target.name="run-test-hdfs-excluding-commit-and-smoke"
testcasesonly="false"/>
</target>
<target name= "-run-test-hdfs-fault-inject-withtestcaseonly" if= "testcase" >
<macro-run-tests-fault-inject
target.name="run-test-hdfs-all-withtestcaseonly"
2009-10-13 17:54:48 -04:00
testcasesonly="false"/>
</target>
<target name= "jar-hdfs-test-fault-inject" depends= "injectfaults"
description="Make hadoop-hdfs-test-fi.jar">
<macro-jar-test-fault-inject
target.name="jar-hdfs-test"
jar.final.name="test.hdfs.final.name"
jar.final.value="${name}-test-${version}-fi" />
</target>
<target name= "jar-fault-inject" depends= "injectfaults"
description="Make hadoop-fi.jar">
<macro-jar-fault-inject
target.name="jar"
build.dir="${build-fi.dir}"
jar.final.name="final.name"
jar.final.value="${final.name}-fi" />
</target>
<!-- This target is not included in the top-level list of targets
because it serves the special "regression" purpose of running non-FI tests
in an FI environment -->
<target name= "run-fault-inject-with-testcaseonly" depends= "injectfaults" >
<fail unless= "testcase" > Can't run this target without -Dtestcase setting!
</fail>
<macro-run-tests-fault-inject
target.name="run-test-hdfs-all-withtestcaseonly"
testcasesonly="true"/>
</target>
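<!-- e.g. ant run-fault-inject-with-testcaseonly -Dtestcase=TestSomething
     (the test name here is hypothetical) -->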
<!-- ================================================================== -->
<!-- End of Fault injection customization section -->
<!-- ================================================================== -->
<condition property= "tests.notestcase" >
<and >
<isfalse value= "${test.fault.inject}" />
<not >
<isset property= "testcase" />
</not>
</and>
</condition>
<condition property= "tests.notestcase.fi" >
<and >
<not >
<isset property= "testcase" />
</not>
<istrue value= "${test.fault.inject}" />
</and>
</condition>
<condition property= "tests.testcase" >
<and >
<isfalse value= "${test.fault.inject}" />
<isset property= "testcase" />
</and>
</condition>
<condition property= "tests.testcaseonly.fi" >
<istrue value= "${special.fi.testcasesonly}" />
</condition>
<condition property= "tests.testcase.fi" >
<and >
<istrue value= "${test.fault.inject}" />
<isset property= "testcase" />
<isfalse value= "${special.fi.testcasesonly}" />
</and>
</condition>
<!-- ================================================================== -->
<!-- Define exclude lists for different kinds of testing -->
<!-- ================================================================== -->
<patternset id= "empty.exclude.list.id" />
<patternset id= "commit.smoke.exclude.list.id" >
<excludesfile name= "${test.hdfs.commit.tests.file}" />
<excludesfile name= "${test.hdfs.smoke.tests.file}" />
</patternset>
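<!-- The commit-tests, smoke-tests and all-tests files list test name patterns,
     one pattern per line, and are consumed via includesfile/excludesfile -->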
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
<macrodef name= "macro-test-runner" >
<attribute name= "test.file" />
<attribute name= "suite.type" />
<attribute name= "classpath" />
<attribute name= "test.dir" />
<attribute name= "fileset.dir" />
<attribute name= "hadoop.conf.dir.deployed" default= "" />
<attribute name= "exclude.list.id" default= "empty.exclude.list.id" />
<sequential >
<delete dir= "@{test.dir}/data" />
<mkdir dir= "@{test.dir}/data" />
<delete dir= "@{test.dir}/logs" />
<mkdir dir= "@{test.dir}/logs" />
<copy file= "${test.src.dir}/hadoop-policy.xml"
todir="@{test.dir}/extraconf" />
<copy file= "${test.src.dir}/fi-site.xml"
todir="@{test.dir}/extraconf" />
<junit showoutput= "${test.output}"
printsummary="${test.junit.printsummary}"
haltonfailure="${test.junit.haltonfailure}"
fork="yes"
forkmode="${test.junit.fork.mode}"
maxmemory="${test.junit.maxmemory}"
dir="${basedir}" timeout="${test.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed">
<jvmarg value= "${test.junit.jvmargs}" />
<sysproperty key= "java.net.preferIPv4Stack" value= "true" />
<sysproperty key= "test.build.data" value= "@{test.dir}/data" />
<sysproperty key= "test.cache.data" value= "${test.cache.data}" />
<sysproperty key= "test.debug.data" value= "${test.debug.data}" />
<sysproperty key= "hadoop.log.dir" value= "@{test.dir}/logs" />
<sysproperty key= "test.src.dir" value= "@{fileset.dir}" />
<sysproperty key= "test.build.extraconf" value= "@{test.dir}/extraconf" />
<sysproperty key= "java.security.krb5.conf" value= "${test.src.dir}/krb5.conf" />
<sysproperty key= "hadoop.policy.file" value= "hadoop-policy.xml" />
<sysproperty key= "hdfs.rpc.engine" value= "${test.hdfs.rpc.engine}" />
<sysproperty key= "java.security.egd" value= "file:///dev/urandom" />
<classpath refid= "@{classpath}" />
<!-- Pass probability specifications to the spawned JVM -->
<syspropertyset id= "FaultProbabilityProperties" >
<propertyref regex= "fi.*" />
</syspropertyset>
<sysproperty key= "test.system.hdrc.deployed.hadoopconfdir"
value="@{hadoop.conf.dir.deployed}" />
<formatter type= "${test.junit.output.format}" />
<batchtest todir= "@{test.dir}" if= "tests.notestcase" >
<fileset dir= "@{fileset.dir}/@{suite.type}"
excludes="**/${test.exclude}.java aop/** system/**">
<patternset >
<includesfile name= "@{test.file}" />
<excludesfile name= "${test.exclude.file}" />
</patternset>
<patternset refid= "@{exclude.list.id}" />
</fileset>
</batchtest>
<batchtest todir= "@{test.dir}" if= "tests.notestcase.fi" >
<fileset dir= "@{fileset.dir}/aop"
includes="**/${test.include}.java"
excludes="**/${test.exclude}.java"
excludesfile="${test.exclude.file}" />
</batchtest>
<batchtest todir= "@{test.dir}" if= "tests.testcase" >
<fileset dir= "@{fileset.dir}/@{suite.type}" includes= "**/${testcase}.java"
excludes="aop/** system/**"/>
</batchtest>
<batchtest todir= "@{test.dir}" if= "tests.testcase.fi" >
<fileset dir= "@{fileset.dir}/aop" includes= "**/${testcase}.java" />
</batchtest>
<!-- The following batch is for very special occasions only, when
non-FI tests need to be executed against an FI environment -->
<batchtest todir= "@{test.dir}" if= "tests.testcaseonly.fi" >
<fileset dir= "@{fileset.dir}/hdfs" includes= "**/${testcase}.java" />
</batchtest>
</junit>
<antcall target= "checkfailure" />
</sequential>
</macrodef>
<target name= "run-test-hdfs" depends= "run-commit-test, run-smoke-test,
run-test-hdfs-excluding-commit-and-smoke, run-test-hdfs-all-withtestcaseonly"
description="Run the full set of hdfs unit tests">
</target>
<target name= "run-test-hdfs-all-withtestcaseonly" depends= "compile-hdfs-test" if= "testcase" >
<macro-test-runner
test.file="${test.hdfs.all.tests.file}"
suite.type="hdfs"
classpath="${test.classpath.id}"
test.dir="${test.build.dir}"
fileset.dir="${test.src.dir}"/>
</target>
<target name= "run-test-hdfs-excluding-commit-and-smoke"
depends="compile-hdfs-test" unless="testcase">
<macro-test-runner
test.file="${test.hdfs.all.tests.file}"
suite.type="hdfs"
classpath="${test.classpath.id}"
test.dir="${test.build.dir}"
fileset.dir="${test.src.dir}"
exclude.list.id="commit.smoke.exclude.list.id"/>
</target>
<target name= "run-commit-test" depends= "compile-hdfs-test"
description="Run the approximately 10-minute set of unit tests prior to committing"
unless="testcase">
<macro-test-runner
test.file="${test.hdfs.all.tests.file}"
suite.type="unit"
classpath="${test.classpath.id}"
test.dir="${test.build.dir}"
fileset.dir="${test.src.dir}"/>
<macro-test-runner
test.file="${test.hdfs.commit.tests.file}"
suite.type="hdfs"
classpath="${test.classpath.id}"
test.dir="${test.build.dir}"
fileset.dir="${test.src.dir}"/>
</target>
<target name= "run-smoke-test" depends= "compile-hdfs-test"
description="Run the approximately 30-minute set of functional tests that verify HDFS viability"
unless="testcase">
<macro-test-runner
test.file="${test.hdfs.smoke.tests.file}"
suite.type="hdfs"
classpath="${test.classpath.id}"
test.dir="${test.build.dir}"
fileset.dir="${test.src.dir}"/>
</target>
<target name= "run-test-unit" depends= "compile-hdfs-test" description= "Run unit tests" >
<macro-test-runner
test.file="${test.hdfs.all.tests.file}"
suite.type="unit"
classpath="${test.classpath.id}"
test.dir="${test.build.dir}"
fileset.dir="${test.src.dir}"/>
</target>
<target name= "checkfailure" if= "tests.failed" >
<touch file= "${test.build.dir}/testsfailed" />
<fail unless= "continueOnFailure" > Tests failed!</fail>
</target>
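<!-- checkfailure records a marker file and fails fast unless continueOnFailure
     is set; test-core sets that property so every suite runs before the final check -->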
<target name= "test-contrib" depends= "compile-hdfs-test" description= "Run contrib unit tests" >
<subant target= "test" >
<property name= "version" value= "${version}" />
<property name= "hadoop-version" value= "${hadoop-common.version}" />
<property name= "clover.jar" value= "${clover.jar}" />
<fileset file= "${contrib.dir}/build.xml" />
</subant>
</target>
<target name= "test-core" description= "Run hdfs unit tests" >
<delete file= "${test.build.dir}/testsfailed" />
<property name= "continueOnFailure" value= "true" />
<antcall target= "run-test-hdfs" />
<antcall target= "run-test-unit" />
<antcall target= "run-test-hdfs-fault-inject" />
<available file= "${test.build.dir}/testsfailed" property= "testsfailed" />
<fail if= "testsfailed" > Tests failed!</fail>
</target>
<target name= "test" depends= "test-c++-libhdfs, jar-test, test-core" description= "Run all unit tests" >
<subant target= "test-contrib" >
<fileset file= "${basedir}/build.xml" />
</subant>
</target>
<!-- Run all unit tests, not just Test*, and use non-test configuration. -->
<target name= "test-cluster" description= "Run all unit tests, not just Test*, and use non-test configuration." >
<antcall target= "test" >
<param name= "test.include" value= "*" />
<param name= "test.classpath.id" value= "test.cluster.classpath" />
</antcall>
</target>
<target name= "nightly" depends= "test, tar" >
</target>
<!-- ================================================================== -->
<!-- Run optional third-party tool targets -->
<!-- ================================================================== -->
<target name= "checkstyle" depends= "ivy-retrieve-checkstyle,check-for-checkstyle" if= "checkstyle.present" description= "Run optional third-party tool targets" >
<taskdef resource= "checkstyletask.properties" >
<classpath refid= "checkstyle-classpath" />
</taskdef>
<mkdir dir= "${test.build.dir}" />
<checkstyle config= "${test.src.dir}/checkstyle.xml"
failOnViolation="false">
<fileset dir= "${java.src.dir}" includes= "**/*.java" excludes= "**/generated/**" />
<formatter type= "xml" toFile= "${test.build.dir}/checkstyle-errors.xml" />
</checkstyle>
<xslt style= "${test.src.dir}/checkstyle-noframes-sorted.xsl"
in="${test.build.dir}/checkstyle-errors.xml"
out="${test.build.dir}/checkstyle-errors.html"/>
</target>
<target name= "check-for-checkstyle" >
<available property= "checkstyle.present" resource= "checkstyletask.properties" >
<classpath refid= "checkstyle-classpath" />
</available>
</target>
<property name= "findbugs.home" value= "" />
<target name= "findbugs" depends= "check-for-findbugs, jar" if= "findbugs.present" description= "Run findbugs if present" >
<property name= "findbugs.out.dir" value= "${test.build.dir}/findbugs" />
<property name= "findbugs.exclude.file" value= "${test.src.dir}/findbugsExcludeFile.xml" />
<property name= "findbugs.report.htmlfile" value= "${findbugs.out.dir}/hadoop-findbugs-report.html" />
<property name= "findbugs.report.xmlfile" value= "${findbugs.out.dir}/hadoop-findbugs-report.xml" />
<taskdef name= "findbugs" classname= "edu.umd.cs.findbugs.anttask.FindBugsTask"
classpath="${findbugs.home}/lib/findbugs-ant.jar" />
<mkdir dir= "${findbugs.out.dir}" />
<findbugs home= "${findbugs.home}" output= "xml:withMessages"
outputFile="${findbugs.report.xmlfile}" effort="max"
excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
<auxClasspath >
<fileset dir= "${lib.dir}" >
<include name= "**/*.jar" />
</fileset>
<fileset dir= "${build.ivy.lib.dir}/${ant.project.name}/common" >
<include name= "**/*.jar" />
</fileset>
</auxClasspath>
<sourcePath path= "${java.src.dir}" />
<class location= "${basedir}/build/${final.name}.jar" />
</findbugs>
<xslt style= "${findbugs.home}/src/xsl/default.xsl"
in="${findbugs.report.xmlfile}"
out="${findbugs.report.htmlfile}"/>
</target>
<target name= "check-for-findbugs" >
<available property= "findbugs.present"
file="${findbugs.home}/lib/findbugs.jar" />
</target>
<!-- ================================================================== -->
<!-- Documentation -->
<!-- ================================================================== -->
2011-06-12 18:00:51 -04:00
<target name= "docs" depends= "forrest.check" description= "Generate forrest-based documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if= "forrest.home" >
2009-05-19 00:30:38 -04:00
<exec dir= "${docs.src}" executable= "${forrest.home}/bin/forrest"
failonerror="true">
</exec>
<copy todir= "${build.docs}" >
<fileset dir= "${docs.src}/build/site/" />
</copy>
<copy file= "${docs.src}/releasenotes.html" todir= "${build.docs}" />
<style basedir= "${java.src.dir}" destdir= "${build.docs}"
includes="hdfs-default.xml" style="conf/configuration.xsl"/>
<antcall target= "changes-to-html" />
</target>
<target name= "forrest.check" unless= "forrest.home" >
<fail message= "'forrest.home' is not defined. Please pass -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
</target>
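<!-- e.g. ant docs -Dforrest.home=$HOME/apache-forrest (the path is illustrative) -->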
<target name= "javadoc-dev" depends= "compile, ivy-retrieve-javadoc" description= "Generate javadoc for hadoop developers" >
<mkdir dir= "${build.javadoc.dev}" />
<javadoc
overview="${java.src.dir}/overview.html"
packagenames="org.apache.hadoop.*"
destdir="${build.javadoc.dev}"
author="true"
version="true"
use="true"
windowtitle="${Name} ${version} API"
doctitle="${Name} ${version} Developer API"
bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
maxmemory="${javadoc.maxmemory}">
<packageset dir= "${java.src.dir}" />
<link href= "${javadoc.link.java}" />
<classpath >
<path refid= "classpath" />
<path refid= "javadoc-classpath" />
<pathelement path= "${java.class.path}" />
</classpath>
<group title= "${ant.project.name}" packages= "org.apache.*" />
</javadoc>
</target>
<target name= "javadoc-uptodate" depends= "compile, ivy-retrieve-javadoc" >
<uptodate property= "javadoc.is.uptodate" >
<srcfiles dir= "${src.dir}" >
<include name= "**/*.java" />
<include name= "**/*.html" />
</srcfiles>
<mapper type= "merge" to= "${build.javadoc.timestamp}" />
</uptodate>
</target>
<target name= "javadoc" description= "Generate javadoc" depends= "javadoc-uptodate"
unless="javadoc.is.uptodate">
<mkdir dir= "${build.javadoc}" />
<javadoc
overview="${java.src.dir}/overview.html"
packagenames="org.apache.hadoop.*"
destdir="${build.javadoc}"
author="true"
version="true"
use="true"
windowtitle="${Name} ${version} API"
doctitle="${Name} ${version} API"
bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
maxmemory="${javadoc.maxmemory}">
<packageset dir= "${java.src.dir}" >
<exclude name= "org/apache/hadoop/hdfs/protocol/proto" />
</packageset>
<link href= "${javadoc.link.java}" />
<classpath >
<path refid= "classpath" />
<path refid= "javadoc-classpath" />
<pathelement path= "${java.class.path}" />
<pathelement location= "${build.tools}" />
</classpath>
<group title= "${ant.project.name}" packages= "org.apache.*" />
</javadoc>
</target>
<target name= "api-xml" depends= "ivy-retrieve-jdiff,javadoc,write-null" >
<javadoc maxmemory= "${javadoc.maxmemory}" >
<doclet name= "jdiff.JDiff"
path="${jdiff.jar}:${xerces.jar}">
<param name= "-apidir" value= "${jdiff.xml.dir}" />
<param name= "-apiname" value= "hadoop-hdfs ${version}" />
</doclet>
<packageset dir= "${java.src.dir}" >
<exclude name= "org/apache/hadoop/hdfs/protocol/proto" />
</packageset>
<classpath >
<path refid= "classpath" />
<path refid= "jdiff-classpath" />
<pathelement path= "${java.class.path}" />
</classpath>
</javadoc>
</target>
<target name= "write-null" >
<exec executable= "touch" >
<arg value= "${jdiff.home}/Null.java" />
</exec>
</target>
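<!-- Null.java is an empty placeholder source file, so the JDiff javadoc run
     in api-report always has at least one source file to process -->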
<target name= "api-report" depends= "ivy-retrieve-jdiff,api-xml" >
<mkdir dir= "${jdiff.build.dir}" />
<javadoc sourcepath= "src/java"
destdir="${jdiff.build.dir}"
sourceFiles="${jdiff.home}/Null.java"
maxmemory="${javadoc.maxmemory}">
<doclet name= "jdiff.JDiff"
path="${jdiff.jar}:${xerces.jar}">
<param name= "-oldapi" value= "hadoop-hdfs ${jdiff.stable}" />
<param name= "-newapi" value= "hadoop-hdfs ${version}" />
<param name= "-oldapidir" value= "${jdiff.xml.dir}" />
<param name= "-newapidir" value= "${jdiff.xml.dir}" />
<param name= "-javadocold" value= "${jdiff.stable.javadoc}" />
<param name= "-javadocnew" value= "../../api/" />
<param name= "-stats" />
</doclet>
<classpath >
<path refid= "classpath" />
<path refid= "jdiff-classpath" />
<pathelement path= "${java.class.path}" />
</classpath>
</javadoc>
</target>
<target name= "changes-to-html" description= "Convert CHANGES.txt into an html file" >
<mkdir dir= "${build.docs}" />
<exec executable= "perl" input= "CHANGES.txt" output= "${build.docs}/changes.html" failonerror= "true" >
<arg value= "${changes.src}/changes2html.pl" />
</exec>
<copy todir= "${build.docs}" >
<fileset dir= "${changes.src}" includes= "*.css" />
</copy>
</target>
<!-- ================================================================== -->
<!-- D I S T R I B U T I O N -->
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
<target name= "package" depends= "compile, jar, javadoc, docs, api-report, create-libhdfs-configure, jar-test, ant-tasks, jsvc"
description="Build distribution">
<mkdir dir= "${dist.dir}" />
<mkdir dir= "${dist.dir}/lib" />
<mkdir dir= "${dist.dir}/contrib" />
<mkdir dir= "${dist.dir}/bin" />
<mkdir dir= "${dist.dir}/docs" />
<mkdir dir= "${dist.dir}/docs/api" />
<mkdir dir= "${dist.dir}/docs/jdiff" />
<copy todir= "${dist.dir}/lib" includeEmptyDirs= "false" flatten= "true" >
<fileset dir= "${hdfs.ivy.lib.dir}" />
</copy>
<copy todir= "${dist.dir}/lib" includeEmptyDirs= "false" >
<fileset dir= "lib" >
<exclude name= "**/native/**" />
</fileset>
</copy>
<subant target= "package" >
<!-- Pass down the version in case it's needed again, and the target
distribution directory so contribs know where to install to. -->
<property name= "version" value= "${version}" />
<property name= "dist.dir" value= "${dist.dir}" />
<fileset file= "${contrib.dir}/build.xml" />
</subant>
<copy todir= "${dist.dir}/webapps" >
<fileset dir= "${build.webapps}" />
</copy>
<copy todir= "${dist.dir}" >
<fileset file= "${build.dir}/${name}-*.jar" />
</copy>
<copy todir= "${dist.dir}/bin" >
<fileset dir= "bin" />
</copy>
<copy todir= "${dist.dir}/conf" >
<fileset dir= "${conf.dir}" excludes= "**/*.template" />
</copy>
<copy todir= "${dist.dir}/docs" >
<fileset dir= "${build.docs}" />
</copy>
<copy file= "ivy.xml" tofile= "${dist.dir}/ivy.xml" />
<copy todir= "${dist.dir}/ivy" >
<fileset dir= "ivy" />
</copy>
<copy todir= "${dist.dir}" >
<fileset dir= "." >
<include name= "*.txt" />
</fileset>
</copy>
<copy todir= "${dist.dir}/src" includeEmptyDirs= "true" >
<fileset dir= "src" excludes= "**/*.template **/docs/build/**/*" />
</copy>
<copy todir= "${dist.dir}/c++" includeEmptyDirs= "false" >
<fileset dir= "${build.dir}/c++" />
</copy>
<copy todir= "${dist.dir}/" file= "build.xml" />
<chmod perm= "ugo+x" file= "${dist.dir}/src/c++/libhdfs/configure" />
<chmod perm= "ugo+x" type= "file" parallel= "false" >
<fileset dir= "${dist.dir}/bin" />
<fileset dir= "${dist.dir}/src/contrib/" >
<include name= "*/bin/*" />
</fileset>
</chmod>
</target>
<!-- ================================================================== -->
<!-- Make release tarball -->
<!-- ================================================================== -->
<target name= "tar" depends= "package" description= "Make release tarball" >
<macro_tar param.destfile= "${build.dir}/${final.name}.tar.gz" >
<param.listofitems >
<tarfileset dir= "${build.dir}" mode= "664" >
<exclude name= "${final.name}/bin/*" />
<exclude name= "${final.name}/contrib/*/bin/*" />
<exclude name= "${final.name}/src/c++/libhdfs/configure" />
<include name= "${final.name}/**" />
</tarfileset>
<tarfileset dir= "${build.dir}" mode= "755" >
<include name= "${final.name}/bin/*" />
<include name= "${final.name}/contrib/*/bin/*" />
<include name= "${final.name}/src/c++/libhdfs/configure" />
</tarfileset>
</param.listofitems>
</macro_tar>
</target>
<target name= "bin-package" depends= "compile, compile-c++-libhdfs, jar, jar-test, ant-tasks, jsvc"
description="Assemble artifacts for the binary target">
<mkdir dir= "${dist.dir}" />
<mkdir dir= "${dist.dir}/lib" />
<mkdir dir= "${dist.dir}/${package.share.dir}/contrib" />
<mkdir dir= "${dist.dir}/${package.share.dir}/lib" />
<mkdir dir= "${dist.dir}/${package.share.dir}/templates" />
<mkdir dir= "${dist.dir}/bin" />
<mkdir dir= "${dist.dir}/libexec" />
<mkdir dir= "${dist.dir}/sbin" />
<copy todir= "${dist.dir}/${package.share.dir}/lib" includeEmptyDirs= "false" flatten= "true" >
<fileset dir= "${hdfs.ivy.lib.dir}" />
</copy>
<copy todir= "${dist.dir}/lib" includeEmptyDirs= "false" >
<fileset dir= "${build.dir}/c++/${build.platform}/lib" erroronmissingdir= "false" >
<include name= "**" />
</fileset>
</copy>
<subant target= "package" >
<!-- Pass down the version in case it's needed again, and the target
distribution directory so contribs know where to install to. -->
<property name= "version" value= "${version}" />
<property name= "dist.dir" value= "${dist.dir}/${package.share.dir}" />
<fileset file= "${contrib.dir}/build.xml" />
</subant>
<copy todir= "${dist.dir}/${package.share.dir}" >
<fileset file= "${build.dir}/${name}-*.jar" />
</copy>
<copy todir= "${dist.dir}/bin" >
<fileset dir= "bin" >
<include name= "hdfs" />
</fileset>
</copy>
<copy todir= "${dist.dir}/libexec" >
<fileset dir= "bin" >
<include name= "hdfs-config.sh" />
</fileset>
</copy>
<copy todir= "${dist.dir}/sbin" >
<fileset dir= "bin" >
<include name= "start-*.sh" />
<include name= "stop-*.sh" />
</fileset>
</copy>
<copy file= "${basedir}/src/packages/rpm/init.d/hadoop-namenode" tofile= "${dist.dir}/sbin/hadoop-namenode.redhat" />
<copy file= "${basedir}/src/packages/rpm/init.d/hadoop-datanode" tofile= "${dist.dir}/sbin/hadoop-datanode.redhat" />
<copy file= "${basedir}/src/packages/deb/init.d/hadoop-namenode" tofile= "${dist.dir}/sbin/hadoop-namenode.debian" />
<copy file= "${basedir}/src/packages/deb/init.d/hadoop-datanode" tofile= "${dist.dir}/sbin/hadoop-datanode.debian" />
<copy file= "${basedir}/src/packages/update-hdfs-env.sh" tofile= "${dist.dir}/sbin/update-hdfs-env.sh" />
<copy todir= "${dist.dir}/etc/hadoop" >
<fileset dir= "${conf.dir}" excludes= "**/*.template" />
<fileset dir= "${basedir}/src/packages/templates/conf" includes= "*.template" />
</copy>
<copy todir= "${dist.dir}/${package.share.dir}/templates" >
<fileset dir= "${basedir}/src/packages/templates/conf" includes= "*" />
</copy>
<copy todir= "${dist.dir}/${package.share.dir}/webapps" >
<fileset dir= "${build.webapps}" />
</copy>
<copy todir= "${dist.dir}/share/doc/hadoop/${module}" >
<fileset dir= "." >
<include name= "*.txt" />
</fileset>
</copy>
<chmod perm= "ugo+x" type= "file" parallel= "false" >
<fileset dir= "${dist.dir}/bin" />
<fileset dir= "${dist.dir}/sbin" />
</chmod>
</target>
<target name= "binary-system" depends= "bin-package, jar-system, jar-test-system"
description="Make system test package for deployment">
<copy todir= "${system-test-build-dir}/${final.name}" >
<fileset dir= "${dist.dir}" >
</fileset>
</copy>
<copy todir= "${system-test-build-dir}/${final.name}/conf" >
<fileset dir= "${test.src.dir}/system/conf/" />
</copy>
<copy tofile= "${system-test-build-dir}/${final.name}/lib/hadoop-common-${hadoop-common.version}.jar"
file="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${hadoop-common.version}.jar"
overwrite="true"/>
<copy tofile= "${system-test-build-dir}/${final.name}/${final.name}.jar"
file="${system-test-build-dir}/${instrumented.final.name}.jar" overwrite="true"/>
<copy tofile= "${system-test-build-dir}/${final.name}/${final.name}-sources.jar"
file="${system-test-build-dir}/${instrumented.final.name}-sources.jar" overwrite="true"/>
<copy todir= "${system-test-build-dir}/${final.name}"
file="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar"/>
<copy todir= "${system-test-build-dir}/${final.name}"
file="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar"/>
<macro_tar
param.destfile="${system-test-build-dir}/${final.name}-bin.tar.gz">
<param.listofitems >
<tarfileset dir= "${system-test-build-dir}" mode= "664" >
<exclude name= "${final.name}/bin/*" />
<exclude name= "${final.name}/libexec/*" />
<exclude name= "${final.name}/src/**" />
<exclude name= "${final.name}/docs/**" />
<include name= "${final.name}/**" />
</tarfileset>
<tarfileset dir= "${build.dir}" mode= "755" >
<include name= "${final.name}/bin/*" />
<include name= "${final.name}/libexec/*" />
<include name= "${final.name}/sbin/*" />
</tarfileset>
</param.listofitems>
</macro_tar>
</target>
<target name= "binary" depends= "bin-package" description= "Make tarball without source and documentation" >
<macro_tar param.destfile= "${build.dir}/${final.name}-bin.tar.gz" >
<param.listofitems >
<tarfileset dir= "${build.dir}" mode= "664" >
<exclude name= "${final.name}/bin/*" />
<exclude name= "${final.name}/libexec/*" />
<exclude name= "${final.name}/sbin/*" />
<exclude name= "${final.name}/src/**" />
<exclude name= "${final.name}/docs/**" />
<include name= "${final.name}/**" />
</tarfileset>
<tarfileset dir= "${build.dir}" mode= "755" >
<include name= "${final.name}/bin/*" />
<include name= "${final.name}/libexec/*" />
<include name= "${final.name}/sbin/*" />
</tarfileset>
</param.listofitems>
</macro_tar>
</target>
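<!-- Packaging targets. Note that Ant's <rpm> task shells out to the
     rpm/rpmbuild binary, so "ant rpm" only works on a host with the rpm
     toolchain installed. Layout tokens such as package.prefix and
     package.conf.dir are substituted into the spec file below; the
     values in this example invocation are illustrative overrides:
       ant rpm -Dpackage.prefix=/usr -Dpackage.conf.dir=/etc/hadoop -->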
<target name= "rpm" depends= "binary" description= "Make rpm package" >
<mkdir dir= "${package.buildroot}/BUILD" />
<mkdir dir= "${package.buildroot}/RPMS" />
<mkdir dir= "${package.buildroot}/SRPMS" />
<mkdir dir= "${package.buildroot}/SOURCES" />
<mkdir dir= "${package.buildroot}/SPECS" />
<copy todir= "${package.buildroot}/SOURCES" >
<fileset dir= "${build.dir}" >
<include name= "${final.name}-bin.tar.gz" />
</fileset>
</copy>
<copy file= "${src.dir}/packages/rpm/spec/hadoop-hdfs.spec" todir= "${package.buildroot}/SPECS" >
<filterchain >
<replacetokens >
<token key= "final.name" value= "${final.name}" />
<token key= "version" value= "${_version}" />
<token key= "package.release" value= "${package.release}" />
<token key= "package.build.dir" value= "${package.build.dir}" />
<token key= "package.prefix" value= "${package.prefix}" />
<token key= "package.conf.dir" value= "${package.conf.dir}" />
<token key= "package.log.dir" value= "${package.log.dir}" />
<token key= "package.pid.dir" value= "${package.pid.dir}" />
<token key= "package.var.dir" value= "${package.var.dir}" />
</replacetokens>
</filterchain>
</copy>
<rpm specFile= "hadoop-hdfs.spec" command= "-bb --target ${os.arch}" topDir= "${package.buildroot}" cleanBuildDir= "true" failOnError= "true" />
<copy todir= "${build.dir}/" flatten= "true" >
<fileset dir= "${package.buildroot}/RPMS" >
<include name= "**/*.rpm" />
</fileset>
</copy>
<delete dir= "${package.buildroot}" quiet= "true" verbose= "false" />
</target>
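<!-- The deb target builds the Debian package with the pure-Java jdeb
     task (org.vafer.jdeb), retrieved through Ivy's "package"
     configuration, so no dpkg toolchain is required on the build host.
     Example: ant deb -->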
<target name= "deb" depends= "ivy-retrieve-package, binary" description= "Make deb package" >
<taskdef name= "deb"
classname="org.vafer.jdeb.ant.DebAntTask">
<classpath refid= "ivy-package.classpath" />
</taskdef>
<mkdir dir= "${package.build.dir}/hadoop.control" />
<mkdir dir= "${package.buildroot}/${package.prefix}" />
<copy todir= "${package.buildroot}/${package.prefix}" >
<fileset dir= "${build.dir}/${final.name}" >
<include name= "**" />
</fileset>
</copy>
<copy todir= "${package.build.dir}/hadoop.control" >
<fileset dir= "${src.dir}/packages/deb/hadoop.control" >
<exclude name= "control" />
</fileset>
</copy>
<copy file= "${src.dir}/packages/deb/hadoop.control/control" todir= "${package.build.dir}/hadoop.control" >
<filterchain >
<replacetokens >
<token key= "final.name" value= "${final.name}" />
<token key= "version" value= "${_version}" />
<token key= "package.release" value= "${package.release}" />
<token key= "package.build.dir" value= "${package.build.dir}" />
<token key= "package.prefix" value= "${package.prefix}" />
<token key= "package.conf.dir" value= "${package.conf.dir}" />
<token key= "package.log.dir" value= "${package.log.dir}" />
<token key= "package.pid.dir" value= "${package.pid.dir}" />
</replacetokens>
</filterchain>
</copy>
<deb destfile= "${package.buildroot}/${name}_${_version}-${package.release}_${os.arch}.deb" control= "${package.build.dir}/hadoop.control" >
<tarfileset dir= "${build.dir}/${final.name}" filemode= "644" prefix= "${package.prefix}" >
<exclude name= "bin/*" />
<exclude name= "${package.share.dir}/contrib/*/bin/*" />
<exclude name= "etc" />
<exclude name= "etc/**" />
<exclude name= "libexec/*" />
<exclude name= "sbin/*" />
<include name= "**" />
</tarfileset>
<tarfileset dir= "${build.dir}/${final.name}" filemode= "755" prefix= "${package.prefix}" >
<include name= "bin/*" />
<exclude name= "sbin/*.redhat" />
<exclude name= "sbin/*.debian" />
<include name= "sbin/*" />
<include name= "libexec/*" />
<include name= "${package.share.dir}/contrib/*/bin/*" />
</tarfileset>
<tarfileset dir= "${src.dir}/packages" filemode= "755" prefix= "${package.prefix}/sbin" >
<include name= "*.sh" />
</tarfileset>
<tarfileset dir= "${build.dir}/${final.name}/etc/hadoop" filemode= "644" prefix= "${package.conf.dir}" >
<include name= "**" />
<exclude name= "configuration.xsl" />
<exclude name= "hadoop-metrics2.properties" />
<exclude name= "core-site.xml" />
<exclude name= "hdfs-site.xml" />
<exclude name= "mapred-site.xml" />
</tarfileset>
<tarfileset dir= "${basedir}/src/packages/deb/init.d" filemode= "755" prefix= "/etc/init.d" >
<include name= "**" />
</tarfileset>
</deb>
<copy todir= "${build.dir}/" flatten= "true" >
<fileset dir= "${package.buildroot}" >
<include name= "**/${name}*.deb" />
</fileset>
</copy>
<delete dir= "${package.buildroot}" quiet= "true" verbose= "false" />
</target>
<!-- ================================================================== -->
<!-- Perform audit activities for the release -->
<!-- ================================================================== -->
<target name= "rats-taskdef" depends= "ivy-retrieve-releaseaudit" >
<typedef format= "xml" resource= "org/apache/rat/anttasks/antlib.xml" uri= "antlib:org.apache.rat.anttasks"
classpathref="releaseaudit-classpath"/>
</target>
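<!-- releaseaudit runs the Apache RAT license-header report over
     ${dist.dir}. The excludes below cover files that legitimately lack
     Apache headers: generated protobuf code, test fixtures, autotools
     support files, and similar. -->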
<target name= "releaseaudit" depends= "package, rats-taskdef" description= "Release Audit activities" >
<rat:report xmlns:rat= "antlib:org.apache.rat.anttasks" >
<fileset dir= "${dist.dir}" >
<exclude name= "CHANGES.txt" />
<exclude name= "docs/" />
<exclude name= "lib/jdiff/" />
<exclude name= "**/conf/*" />
<exclude name= "webapps/**/WEB-INF/web.xml" />
<!-- generated protobuf code does not carry license headers -->
<exclude name= "src/java/org/apache/hadoop/hdfs/protocol/proto/*Protos.java" />
<exclude name= "src/docs/releasenotes.html" />
<exclude name= "src/test/hdfs/org/apache/hadoop/cli/clitest_data/" />
<exclude name= "src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineEditsViewer/editsStored*" />
<exclude name= "**/*/robots.txt" />
<exclude name= "src/c++/libhdfs/m4/libtool.m4" />
<exclude name= "src/c++/libhdfs/m4/lt~obsolete.m4" />
<exclude name= "src/c++/libhdfs/m4/ltoptions.m4" />
<exclude name= "src/c++/libhdfs/m4/ltsugar.m4" />
<exclude name= "src/c++/libhdfs/m4/ltversion.m4" />
<exclude name= "src/test/commit-tests" />
<exclude name= "src/test/smoke-tests" />
<exclude name= "src/test/all-tests" />
<exclude name= "src/test/empty-file" />
<exclude name= "**/*/*.properties" />
<exclude name= "src/c++/libhdfs/config.guess" />
<exclude name= "src/c++/libhdfs/config.sub" />
<exclude name= "src/c++/libhdfs/configure" />
<exclude name= "src/c++/libhdfs/depcomp" />
<exclude name= "src/c++/libhdfs/install-sh" />
<exclude name= "src/c++/libhdfs/ltmain.sh" />
<exclude name= "src/c++/libhdfs/missing" />
<exclude name= "src/test/checkstyle-noframes-sorted.xsl" />
<exclude name= "src/test/checkstyle.xml" />
<exclude name= "src/test/findbugsExcludeFile.xml" />
<exclude name= "src/docs/**/*.odg" />
<exclude name= "**/*.tgz" />
<exclude name= "**/*.tar" />
</fileset>
</rat:report>
</target>
<!-- ================================================================== -->
<!-- Clean. Delete the build files and their directories -->
<!-- ================================================================== -->
<target name= "clean" depends= "clean-contrib, clean-fi, clean-sign" description= "Clean. Delete the build files, and their directories" >
<delete dir= "${build.dir}" />
<delete dir= "${build-fi.dir}" />
<delete dir= "${docs.src}/build" />
<delete file= "${hadoop-hdfs.pom}" />
<delete file= "${hadoop-hdfs-test.pom}" />
<delete file= "${hadoop-hdfs-instrumented.pom}" />
<delete file= "${hadoop-hdfs-instrumented-test.pom}" />
</target>
<target name= "clean-sign" description= "Clean. Delete .asc files" >
<delete >
<fileset dir= "." includes= "**/**/*.asc" />
</delete>
</target>
<target name= "veryclean" depends= "clean-cache,clean"
description="veryclean. Delete ant maven task and ivy jars">
<delete file= "${ant_task.jar}" />
<delete file= "${ivy.jar}" />
</target>
<target name= "clean-cache" depends= "clean" description= "Clean. Delete ivy cache" >
<delete dir= "${user.home}/.ivy2/cache/org.apache.hadoop" />
</target>
<!-- ================================================================== -->
<!-- Clean contrib target. For now, must be called explicitly -->
<!-- Using subant instead of ant as a workaround for Ant bug 30569 -->
<!-- ================================================================== -->
<target name= "clean-contrib" >
<subant target= "clean" >
<fileset file= "src/contrib/build.xml" />
</subant>
</target>
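<!-- Native libhdfs build chain: check-libhdfs-configure detects a
     missing configure script, create-libhdfs-configure regenerates it
     with autoreconf, create-libhdfs-makefile runs configure, and
     compile-c++-libhdfs runs make install. Everything is guarded by the
     libhdfs property; Ant's "if" only tests that the property is set,
     so any value enables it, e.g.:
       ant compile-c++-libhdfs -Dlibhdfs=true -->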
<target name= "test-c++-libhdfs" depends= "compile-c++-libhdfs, compile-core" if= "libhdfs" unless= "clover.enabled" >
<delete dir= "${test.libhdfs.dir}" />
<mkdir dir= "${test.libhdfs.dir}" />
<mkdir dir= "${test.libhdfs.dir}/conf" />
<mkdir dir= "${test.libhdfs.dir}/logs" />
<mkdir dir= "${test.libhdfs.dir}/hdfs/name" />
<exec dir= "${build.c++.libhdfs}" executable= "${make.cmd}" failonerror= "true" >
<env key= "OS_NAME" value= "${os.name}" />
<env key= "OS_ARCH" value= "${os.arch}" />
<env key= "JVM_ARCH" value= "${jvm.arch}" />
<env key= "LIBHDFS_BUILD_DIR" value= "${build.c++.libhdfs}" />
<env key= "HADOOP_PREFIX" value= "${basedir}" />
<env key= "HADOOP_HDFS_HOME" value= "${basedir}" />
<env key= "HADOOP_CONF_DIR" value= "${test.libhdfs.dir}/conf" />
<env key= "HADOOP_LOG_DIR" value= "${test.libhdfs.dir}/logs" />
<env key= "LIBHDFS_TEST_DIR" value= "${test.libhdfs.dir}" />
<env key= "LIBHDFS_SRC_DIR" value= "${c++.libhdfs.src}" />
<env key= "LIBHDFS_INSTALL_DIR" value= "${install.c++}/lib" />
<env key= "LIB_DIR" value= "${common.ivy.lib.dir}" />
<env key= "CLOVER_JAR" value= "${clover.jar}" />
<env key= "HADOOP_VERSION" value= "${version}" />
<arg value= "test" />
</exec>
</target>
<target name= "check-libhdfs-configure" depends= "init" if= "libhdfs" >
<condition property= "need.libhdfs.configure" >
<not > <available file= "${c++.libhdfs.src}/configure" /> </not>
</condition>
</target>
<target name= "create-libhdfs-configure" depends= "check-libhdfs-configure" if= "need.libhdfs.configure" >
<mkdir dir= "${c++.libhdfs.src}/config" />
<exec executable= "autoreconf" dir= "${c++.libhdfs.src}"
searchpath="yes" failonerror="yes">
<arg value= "-if" />
</exec>
</target>
<target name= "check-libhdfs-makefile" depends= "init" if= "libhdfs" >
<condition property= "need.libhdfs.makefile" >
<not > <available file= "${c++.libhdfs.src}/Makefile" /> </not>
</condition>
</target>
<target name= "create-libhdfs-makefile" depends= "check-libhdfs-makefile"
if="need.libhdfs.makefile">
<antcall target= "create-libhdfs-configure" />
<mkdir dir= "${build.c++.libhdfs}" />
<exec executable= "${c++.libhdfs.src}/configure" dir= "${build.c++.libhdfs}"
failonerror="yes">
<env key= "ac_cv_func_malloc_0_nonnull" value= "yes" />
<env key= "JVM_ARCH" value= "${jvm.arch}" />
<arg value= "--prefix=${install.c++}" />
<env key= "base_dir" value= "${basedir}" />
</exec>
</target>
<target name= "compile-c++-libhdfs" depends= "create-libhdfs-makefile" if= "libhdfs" >
<exec executable= "${make.cmd}" dir= "${build.c++.libhdfs}" searchpath= "yes"
failonerror="yes">
<env key= "ac_cv_func_malloc_0_nonnull" value= "yes" />
<env key= "JVM_ARCH" value= "${jvm.arch}" />
<arg value= "install" />
</exec>
<!-- Create a build-platform-agnostic link to the C++ libs -->
<symlink overwrite= "true" link= "${build.dir}/c++/lib" resource= "${install.c++}/lib" />
</target>
<target name= "compile-ant-tasks" depends= "compile-core" >
<javac
encoding="${build.encoding}"
srcdir="${anttasks.dir}"
includes="org/apache/hadoop/ant/**/*.java"
destdir="${build.anttasks}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<compilerarg line= "${javac.args}" />
<classpath refid= "classpath" />
</javac>
</target>
<target name= "ant-tasks" depends= "jar, compile-ant-tasks" >
<copy file= "${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
todir="${build.anttasks}/org/apache/hadoop/ant"/>
<jar destfile= "${build.dir}/${ant.final.name}.jar" >
<fileset dir= "${build.anttasks}" />
</jar>
</target>
<target name= "clover" depends= "clover.setup, clover.info" description= "Instrument the Unit tests using Clover. To use, specify -Dclover.home=<base of clover installation> -Drun.clover=true on the command line." />
<target name= "clover.setup" if= "clover.enabled" >
<taskdef resource= "cloverlib.xml" classpath= "${clover.jar}" />
<mkdir dir= "${clover.db.dir}" />
<clover-setup initString= "${clover.db.dir}/hadoop_coverage.db" >
<fileset dir= "${src.dir}" includes= "java/**/*" />
<testsources dir= "${test.src.dir}" />
</clover-setup>
<echo message= "HDFS-783: test-libhdfs is disabled for Clover'ed builds" />
</target>
<target name= "clover.info" unless= "clover.present" >
<echo >
Clover not found. Code coverage reports disabled.
</echo>
</target>
<target name= "clover.check" >
<fail unless= "clover.present" >
##################################################################
Clover not found.
Please specify -Dclover.home=<base of clover installation>
on the command line.
##################################################################
</fail>
</target>
<target name= "generate-clover-reports" depends= "clover.check, clover" >
<mkdir dir= "${clover.report.dir}" />
<clover-report >
<current outfile= "${clover.report.dir}" title= "${final.name}" >
<format type= "html" />
</current>
</clover-report>
<clover-report >
<current outfile= "${clover.report.dir}/clover.xml" title= "${final.name}" >
<format type= "xml" />
</current>
</clover-report>
</target>
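<!-- Example Clover invocation (the clover.home path is site-specific):
       ant generate-clover-reports -Drun.clover=true -Dclover.home=/opt/clover
     HTML and XML coverage reports are written to ${clover.report.dir}. -->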
<target name= "findbugs.check" depends= "check-for-findbugs" unless= "findbugs.present" >
<fail message= "'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." />
</target>
<target name= "patch.check" unless= "patch.file" >
<fail message= "'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." />
</target>
<target name= "test-patch" depends= "patch.check,findbugs.check,forrest.check" >
<exec executable= "bash" failonerror= "true" >
<arg value= "${basedir}/src/test/bin/test-patch.sh" />
<arg value= "DEVELOPER" />
<arg value= "${patch.file}" />
<arg value= "${scratch.dir}" />
<arg value= "${svn.cmd}" />
<arg value= "${grep.cmd}" />
<arg value= "${patch.cmd}" />
<arg value= "${findbugs.home}" />
<arg value= "${forrest.home}" />
<arg value= "${basedir}" />
</exec>
</target>
<target name= "hudson-test-patch" depends= "findbugs.check,forrest.check" >
<exec executable= "bash" failonerror= "true" >
<arg value= "${basedir}/src/test/bin/test-patch.sh" />
<arg value= "HUDSON" />
<arg value= "${scratch.dir}" />
<arg value= "${support.dir}" />
<arg value= "${ps.cmd}" />
<arg value= "${wget.cmd}" />
<arg value= "${jiracli.cmd}" />
<arg value= "${svn.cmd}" />
<arg value= "${grep.cmd}" />
<arg value= "${patch.cmd}" />
<arg value= "${findbugs.home}" />
<arg value= "${forrest.home}" />
<arg value= "${eclipse.home}" />
<arg value= "${basedir}" />
<arg value= "${jira.passwd}" />
<arg value= "${curl.cmd}" />
<arg value= "${defect}" />
</exec>
</target>
<condition property= "ant-eclipse.jar.exists" >
<available file= "${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar" />
</condition>
<target name= "ant-eclipse-download" unless= "ant-eclipse.jar.exists"
description="Downloads the ant-eclipse binary.">
<get src= "http://downloads.sourceforge.net/project/ant-eclipse/ant-eclipse/1.0/ant-eclipse-1.0.bin.tar.bz2"
dest="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" usetimestamp="false" />
<untar src= "${build.dir}/ant-eclipse-1.0.bin.tar.bz2"
dest="${build.dir}" compression="bzip2">
<patternset >
<include name= "lib/ant-eclipse-1.0-jvm1.2.jar" />
</patternset>
</untar>
<delete file= "${build.dir}/java/ant-eclipse-1.0.bin.tar.bz2" />
</target>
<target name= "eclipse"
depends="init,ant-eclipse-download,ivy-retrieve-hdfs,ivy-retrieve-common,ivy-retrieve-test"
description="Create eclipse project files">
<pathconvert property= "eclipse.project" >
<path path= "${basedir}" />
<regexpmapper from= "^.*/([^/]+)$$" to= "\1" handledirsep= "yes" />
</pathconvert>
<taskdef name= "eclipse"
classname="prantl.ant.eclipse.EclipseTask"
classpath="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar" />
<eclipse updatealways= "true" >
<project name= "${eclipse.project}" />
<classpath >
<source path= "${java.src.dir}"
output="${build.dir.eclipse-main-classes}" />
<source path= "${build.src}"
output="${build.dir.eclipse-main-generated-classes}" />
<source path= "${test.src.dir}/hdfs"
output="${build.dir.eclipse-test-classes}" />
<source path= "${test.src.dir}/unit"
output="${build.dir.eclipse-test-classes}" />
<output path= "${build.dir.eclipse-main-classes}" />
<library pathref= "ivy-common.classpath" exported= "true" />
<library pathref= "ivy-hdfs.classpath" exported= "true" />
<library pathref= "ivy-test.classpath" exported= "false" />
<library path= "${build.webapps.root.dir}" exported= "false" />
<library path= "${conf.dir}" exported= "false" />
</classpath>
</eclipse>
<copy todir= "." overwrite= "true" >
<fileset dir= ".eclipse.templates" >
<exclude name= "**/README.txt" />
</fileset>
<filterset >
<filter token= "PROJECT" value= "${eclipse.project}" />
</filterset>
</copy>
</target>
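<!-- generate-protos probes for the protoc compiler with "which" and
     fails fast if it is absent; generated classes are written directly
     into ${java.src.dir}. Example with a compiler outside the PATH:
       ant generate-protos -Dprotoc=/usr/local/bin/protoc -->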
<target name= "generate-protos"
description="Generate Java code from protocol buffer definitions">
<exec executable= "bash" resultproperty= "which.protoc.result" outputproperty= "" >
<arg value= "-c" />
<arg value= "which ${protoc}" />
</exec>
<condition property= "protoc.found" >
<equals arg1= "${which.protoc.result}" arg2= "0" />
</condition>
<fail unless= "protoc.found"
message="No protoc compiler found. Please pass -Dprotoc=/path/to/protoc if it is not on your path." />
<exec executable= "${protoc}" failonerror= "true" >
<arg value= "--java_out=${java.src.dir}" />
<arg value= "--proto_path=${proto.src.dir}" />
<arg value= "${proto.src.dir}/hdfs.proto" />
<arg value= "${proto.src.dir}/datatransfer.proto" />
</exec>
<echo message= "Generated protocol buffer code successfully." />
</target>
<target name= "ivy-init-dirs" >
<mkdir dir= "${build.ivy.dir}" />
<mkdir dir= "${build.ivy.lib.dir}" />
<mkdir dir= "${build.ivy.report.dir}" />
<mkdir dir= "${build.ivy.maven.dir}" />
</target>
<target name= "ivy-probe-antlib" >
<condition property= "ivy.found" >
<typefound uri= "antlib:org.apache.ivy.ant" name= "cleancache" />
</condition>
</target>
<target name= "ivy-download" description= "To download ivy" unless= "offline" >
<get src= "${ivy_repo_url}" dest= "${ivy.jar}" usetimestamp= "true" />
</target>
<target name= "ant-task-download" description= "To download mvn-ant-task" unless= "offline" >
<get src= "${ant_task_repo_url}" dest= "${ant_task.jar}" usetimestamp= "true" />
</target>
<target name= "mvn-taskdef" depends= "ant-task-download" >
<path id= "mvn-ant-task.classpath" path= "${ant_task.jar}" />
<typedef resource= "org/apache/maven/artifact/ant/antlib.xml"
uri="urn:maven-artifact-ant" classpathref="mvn-ant-task.classpath"/>
</target>
<target name= "mvn-install" depends= "mvn-taskdef,jar,jar-test,set-version"
description="To install hadoop hdfs and test jars to local filesystem's m2 cache">
<artifact:pom file= "${hadoop-hdfs.pom}" id= "hadoop.hdfs" />
<artifact:pom file= "${hadoop-hdfs-test.pom}" id= "hadoop.hdfs.test" />
<artifact:install file= "${hadoop-hdfs.jar}" >
<pom refid= "hadoop.hdfs" />
<attach file= "${hadoop-hdfs-sources.jar}" classifier= "sources" />
</artifact:install>
<artifact:install file= "${hadoop-hdfs-test.jar}" >
<pom refid= "hadoop.hdfs.test" />
<attach file= "${hadoop-hdfs-test-sources.jar}" classifier= "sources" />
</artifact:install>
</target>
<target name= "mvn-si-install" depends= "mvn-install,-mvn-system-install"
description="Install system integration tests jars as well"/>
<target name= "mvn-deploy" depends= "mvn-taskdef, j a r , j a r - t e s t ,
jar-system, jar-test-system, set-version, signanddeploy, simpledeploy"
description="To deploy hadoop hdfs and test jar's to apache
snapshot's repository"/>
<target name= "signanddeploy" if= "staging" depends= "sign" >
<artifact:pom file= "${hadoop-hdfs.pom}" id= "hadoop.hdfs" />
<artifact:pom file= "${hadoop-hdfs-test.pom}" id= "hadoop.hdfs.test" />
<artifact:pom file= "${hadoop-hdfs-instrumented.pom}"
id="hadoop.hdfs.${herriot.suffix}"/>
<artifact:pom file= "${hadoop-hdfs-instrumented-test.pom}"
id="hadoop.hdfs.${herriot.suffix}.test"/>
<artifact:install-provider artifactId= "wagon-http"
version="${wagon-http.version}"/>
<artifact:deploy file= "${hadoop-hdfs.jar}" >
<remoteRepository id= "apache.staging.https" url= "${asfstagingrepo}" />
<pom refid= "hadoop.hdfs" />
<attach file= "${hadoop-hdfs.jar}.asc" type= "jar.asc" />
<attach file= "${hadoop-hdfs.pom}.asc" type= "pom.asc" />
<attach file= "${hadoop-hdfs-sources.jar}.asc" type= "jar.asc"
classifier="sources" />
<attach file= "${hadoop-hdfs-sources.jar}" classifier= "sources" />
</artifact:deploy>
<artifact:deploy file= "${hadoop-hdfs-test.jar}" >
<remoteRepository id= "apache.staging.https" url= "${asfstagingrepo}" />
<pom refid= "hadoop.hdfs.test" />
<attach file= "${hadoop-hdfs-test.jar}.asc" type= "jar.asc" />
<attach file= "${hadoop-hdfs-test.pom}.asc" type= "pom.asc" />
<attach file= "${hadoop-hdfs-test-sources.jar}.asc" type= "jar.asc"
classifier="sources"/>
<attach file= "${hadoop-hdfs-test-sources.jar}" classifier= "sources" />
</artifact:deploy>
<artifact:deploy file= "${hadoop-hdfs-instrumented.jar}" >
<remoteRepository id= "apache.staging.https" url= "${asfstagingrepo}" />
<pom refid= "hadoop.hdfs.${herriot.suffix}" />
<attach file= "${hadoop-hdfs-instrumented.jar}.asc" type= "jar.asc" />
<attach file= "${hadoop-hdfs-instrumented.pom}.asc" type= "pom.asc" />
<attach file= "${hadoop-hdfs-instrumented-sources.jar}.asc"
type="jar.asc" classifier="sources"/>
<attach file= "${hadoop-hdfs-instrumented-sources.jar}"
classifier="sources"/>
</artifact:deploy>
<artifact:deploy file= "${hadoop-hdfs-instrumented-test.jar}" >
<remoteRepository id= "apache.staging.https" url= "${asfstagingrepo}" />
<pom refid= "hadoop.hdfs.${herriot.suffix}.test" />
<attach file= "${hadoop-hdfs-instrumented-test.jar}.asc" type= "jar.asc" />
<attach file= "${hadoop-hdfs-instrumented-test.pom}.asc" type= "pom.asc" />
<attach file= "${hadoop-hdfs-instrumented-test-sources.jar}.asc"
type="jar.asc" classifier="sources"/>
<attach file= "${hadoop-hdfs-instrumented-test-sources.jar}"
classifier="sources"/>
</artifact:deploy>
</target>
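<!-- The sign target prompts for the GPG passphrase through a secure
     input handler and writes an armored, detached .asc signature next
     to each artifact; clean-sign deletes them again. -->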
<target name= "sign" depends= "clean-sign" if= "staging" >
<input message= "password:>" addproperty= "gpg.passphrase" >
<handler classname= "org.apache.tools.ant.input.SecureInputHandler" />
</input>
<macrodef name= "sign-artifact" description= "Signs the artifact" >
<attribute name= "input.file" />
<attribute name= "output.file" default= "@{input.file}.asc" />
<attribute name= "gpg.passphrase" />
<sequential >
<echo > Signing @{input.file} Sig File: @{output.file}</echo>
<exec executable= "gpg" >
<arg value= "--armor" />
<arg value= "--output" />
<arg value= "@{output.file}" />
<arg value= "--passphrase" />
<arg value= "@{gpg.passphrase}" />
<arg value= "--detach-sig" />
<arg value= "@{input.file}" />
</exec>
</sequential>
</macrodef>
<sign-artifact input.file= "${hadoop-hdfs.jar}"
output.file="${hadoop-hdfs.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-test.jar}"
output.file="${hadoop-hdfs-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-sources.jar}"
output.file="${hadoop-hdfs-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-test-sources.jar}"
output.file="${hadoop-hdfs-test-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs.pom}"
output.file="${hadoop-hdfs.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-test.pom}"
output.file="${hadoop-hdfs-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-instrumented.jar}"
output.file="${hadoop-hdfs-instrumented.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-instrumented.pom}"
output.file="${hadoop-hdfs-instrumented.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-instrumented-sources.jar}"
output.file="${hadoop-hdfs-instrumented-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-instrumented-test.jar}"
output.file="${hadoop-hdfs-instrumented-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-instrumented-test.pom}"
output.file="${hadoop-hdfs-instrumented-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file= "${hadoop-hdfs-instrumented-test-sources.jar}"
output.file="${hadoop-hdfs-instrumented-test-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
</target>
<target name= "simpledeploy" unless= "staging" >
<artifact:pom file= "${hadoop-hdfs.pom}" id= "hadoop.hdfs" />
<artifact:pom file= "${hadoop-hdfs-test.pom}" id= "hadoop.hdfs.test" />
<artifact:pom file= "${hadoop-hdfs-instrumented.pom}"
id="hadoop.hdfs.${herriot.suffix}"/>
<artifact:install-provider artifactId= "wagon-http" version= "${wagon-http.version}" />
<artifact:deploy file= "${hadoop-hdfs.jar}" >
<remoteRepository id= "apache.snapshots.https" url= "${asfsnapshotrepo}" />
<pom refid= "hadoop.hdfs" />
<attach file= "${hadoop-hdfs-sources.jar}" classifier= "sources" />
</artifact:deploy>
<artifact:deploy file= "${hadoop-hdfs-test.jar}" >
<remoteRepository id= "apache.snapshots.https" url= "${asfsnapshotrepo}" />
<pom refid= "hadoop.hdfs.test" />
<attach file= "${hadoop-hdfs-test-sources.jar}" classifier= "sources" />
</artifact:deploy>
<artifact:deploy file= "${hadoop-hdfs-instrumented.jar}" >
<remoteRepository id= "apache.snapshots.https" url= "${asfsnapshotrepo}" />
<pom refid= "hadoop.hdfs.${herriot.suffix}" />
<attach file= "${hadoop-hdfs-instrumented-sources.jar}" classifier= "sources" />
</artifact:deploy>
</target>
<target name= "set-version" >
<delete file= "${basedir}/ivy/hadoop-hdfs.xml" />
<delete file= "${basedir}/ivy/hadoop-hdfs-test.xml" />
<delete file= "${basedir}/ivy/hadoop-hdfs-${herriot.suffix}.xml" />
<delete file= "${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-test.xml" />
<copy file= "${basedir}/ivy/hadoop-hdfs-template.xml" tofile= "${basedir}/ivy/hadoop-hdfs.xml" />
<copy file= "${basedir}/ivy/hadoop-hdfs-test-template.xml" tofile= "${basedir}/ivy/hadoop-hdfs-test.xml" />
<copy file= "${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-template.xml"
tofile="${basedir}/ivy/hadoop-hdfs-${herriot.suffix}.xml"/>
<copy file= "${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-test-template.xml"
tofile="${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-test.xml"/>
<replaceregexp byline= "true" >
<regexp pattern= "@version" />
<substitution expression= "${version}" />
<fileset dir= "${basedir}/ivy" >
<include name= "hadoop-hdfs.xml" />
<include name= "hadoop-hdfs-test.xml" />
<include name= "hadoop-hdfs-${herriot.suffix}.xml" />
<include name= "hadoop-hdfs-${herriot.suffix}-test.xml" />
</fileset>
</replaceregexp>
</target>
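<!-- set-version regenerates the four ivy descriptors from their
     templates, replacing every @version token with the current
     ${version} so the descriptors always track the build version. -->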
<!--
To avoid Ivy leaking across big projects, always load Ivy in the same classloader.
Loading is skipped when an Ivy antlib is already present (see ivy-probe-antlib).
-->
<target name= "ivy-init-antlib" depends= "ivy-download,ivy-init-dirs,ivy-probe-antlib" unless= "ivy.found" >
<typedef uri= "antlib:org.apache.ivy.ant" onerror= "fail"
loaderRef="ivyLoader">
<classpath >
<pathelement location= "${ivy.jar}" />
</classpath>
</typedef>
<fail >
<condition >
<not >
<typefound uri= "antlib:org.apache.ivy.ant" name= "cleancache" />
</not>
</condition>
You need Apache Ivy 2.0 or later from http://ant.apache.org/
It could not be loaded from ${ivy_repo_url}
</fail>
</target>
<property name= "ivyresolvelog" value= "download-only" />
<property name= "ivyretrievelog" value= "quiet" />
<target name= "ivy-init" depends= "ivy-init-antlib" >
<!-- Configure Ivy by reading the settings file.
If a settings file has already been loaded under this settings ID, it takes priority.
-->
<ivy:configure settingsid= "${ant.project.name}.ivy.settings" file= "${ivysettings.xml}" override= 'false' />
</target>
<target name= "ivy-resolve" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-javadoc" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "javadoc"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-releaseaudit" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "releaseaudit"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-test" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "test"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-compile" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "compile"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-common" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "common"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-package" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "package"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-hdfs" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "hdfs"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-jdiff" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "jdiff"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-checkstyle" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "checkstyle"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-resolve-system" depends= "ivy-init" >
<ivy:resolve settingsRef= "${ant.project.name}.ivy.settings" conf= "system"
log="${ivyresolvelog}"/>
</target>
<target name= "ivy-retrieve" depends= "ivy-resolve"
description="Retrieve Ivy-managed artifacts">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
</target>
<target name= "ivy-retrieve-checkstyle" depends= "ivy-resolve-checkstyle"
description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "checkstyle-classpath" conf= "checkstyle" />
</target>
<target name= "ivy-retrieve-jdiff" depends= "ivy-resolve-jdiff"
description="Retrieve Ivy-managed artifacts for the jdiff configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "jdiff-classpath" conf= "jdiff" />
</target>
<target name= "ivy-retrieve-javadoc" depends= "ivy-resolve-javadoc"
description="Retrieve Ivy-managed artifacts for the javadoc configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "javadoc-classpath" conf= "javadoc" />
</target>
<target name= "ivy-retrieve-test" depends= "ivy-resolve-test"
description="Retrieve Ivy-managed artifacts for the test configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "ivy-test.classpath" conf= "test" />
</target>
<target name= "ivy-retrieve-compile" depends= "ivy-resolve-compile"
description="Retrieve Ivy-managed artifacts for the compile configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "ivy-compile.classpath" conf= "compile" />
</target>
<target name= "ivy-retrieve-common" depends= "ivy-resolve-common"
description="Retrieve Ivy-managed artifacts for the runtime configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "ivy-common.classpath" conf= "common" />
</target>
<target name= "ivy-retrieve-package" depends= "ivy-resolve-package"
description="Retrieve Ivy-managed artifacts for the package configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "ivy-package.classpath" conf= "package" />
</target>
<target name= "ivy-retrieve-hdfs" depends= "ivy-resolve-hdfs"
description="Retrieve Ivy-managed artifacts for the hdfs configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "ivy-hdfs.classpath" conf= "hdfs" />
</target>
<target name= "ivy-retrieve-releaseaudit" depends= "ivy-resolve-releaseaudit"
description="Retrieve Ivy-managed artifacts for the compile configurations">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "releaseaudit-classpath" conf= "releaseaudit" />
</target>
<target name= "ivy-retrieve-system" depends= "ivy-resolve-system"
description="Retrieve Ivy-managed artifacts for the system tests">
<ivy:retrieve settingsRef= "${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyretrievelog}"/>
<ivy:cachepath pathid= "ivy-test.classpath" conf= "system" />
</target>
<target name= "ivy-report" depends= "ivy-resolve"
description="Generate Ivy dependency reports">
<ivy:report todir= "${build.ivy.report.dir}" settingsRef= "${ant.project.name}.ivy.settings" />
<echo >
Reports generated: ${build.ivy.report.dir}
</echo>
</target>
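<!-- Downloads the jsvc binary (Apache Commons Daemon) and installs it
     with execute permission. jsvc is what lets a daemon bind privileged
     ports as root and then drop privileges, as the secure datanode
     start-up does. -->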
<target name= "jsvc" >
<mkdir dir= "${jsvc.build.dir}" />
<get src= "${jsvc.location}" dest= "${jsvc.build.dir}/${jsvc.dest.name}" />
<untar compression= "gzip" src= "${jsvc.build.dir}/${jsvc.dest.name}" dest= "${jsvc.build.dir}" />
<copy file= "${jsvc.build.dir}/jsvc" todir= "${jsvc.install.dir}" verbose= "true" />
<chmod perm= "ugo+x" type= "file" >
<fileset file= "${jsvc.install.dir}/jsvc" />
</chmod>
</target>
</project>