MAPREDUCE-4447. Remove aop cruft from the ant build. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1362207 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Eli Collins 2012-07-16 19:10:26 +00:00
parent c56bbc7a00
commit d3b1109de8
4 changed files with 5 additions and 1124 deletions

View File

@ -1,276 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="aspects"
xmlns:artifact="urn:maven-artifact-ant">
<!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
<property name="aspectversion" value="1.6.5"/>
<property file="${basedir}/build.properties"/>
<!-- Properties related to system fault injection and tests -->
<property name="system-test-build-dir" value="${build-fi.dir}/system"/>
<!-- Properties specifically for system fault-injections and system tests -->
<property name="herriot.suffix" value="instrumented"/>
<property name="herriot.final.name" value="${name}-${herriot.suffix}-${version}"/>
<property name="hadoop-common-instrumented.pom"
location="${ivy.dir}/hadoop-common-${herriot.suffix}.xml" />
<property name="hadoop-common-instrumented.jar"
location="${system-test-build-dir}/${herriot.final.name}.jar" />
<property name="hadoop-common-instrumented-sources.jar"
location="${system-test-build-dir}/${herriot.final.name}-sources.jar" />
<!-- All Fault Injection (FI) related targets are located in this section -->
<target name="clean-fi">
<delete dir="${build-fi.dir}"/>
</target>
<!-- Weaving aspects in place
Later on one can run 'ant jar-fault-inject' to create
Hadoop jar file with instrumented classes
-->
<!-- Target -classes-compilation has to be defined in build.xml and
needs to depend on classes compilation and test classes compilation
targets. This is a poor man parametrization for targets -->
<target name="-compile-fault-inject" depends="-classes-compilation" >
<!-- AspectJ task definition -->
<taskdef
resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
<classpath>
<pathelement
location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
</classpath>
</taskdef>
<echo message="Start weaving aspects in place"/>
<iajc
encoding="${build.encoding}"
srcdir="${java.src.dir};${build.src};${src.dir.path}"
includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
excludes="org/apache/hadoop/classification/tools/**/*, org/apache/hadoop/record/**/*"
destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<classpath>
<path refid="test.classpath"/>
</classpath>
</iajc>
<!-- Scan the captured weaving log for lines matching "iajc.*warning";
any match indicates an advice failed to bind, and the build is
failed immediately below. -->
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
<filterchain>
<linecontainsregexp>
<regexp pattern='iajc.*warning'/>
</linecontainsregexp>
</filterchain>
</loadfile>
<fail if="injection.failure">
Broken binding of advises: ${line.separator}${injection.failure}
</fail>
<echo message="Weaving of aspects is finished"/>
</target>
<!-- Classpath for running system tests -->
<path id="test.system.classpath">
<pathelement location="${hadoop.conf.dir.deployed}" />
<pathelement location="${system-test-build-dir}/test/extraconf" />
<pathelement location="${system-test-build-dir}/test/classes" />
<pathelement location="${system-test-build-dir}/classes" />
<pathelement location="${test.src.dir}" />
<pathelement location="${build-fi.dir}" />
<pathelement location="${build-fi.dir}/tools" />
<pathelement path="${clover.jar}" />
<fileset dir="${test.lib.dir}">
<include name="**/*.jar" />
<exclude name="**/excluded/" />
</fileset>
<fileset dir="${system-test-build-dir}">
<include name="**/*.jar" />
<exclude name="**/excluded/" />
</fileset>
<path refid="classpath" />
</path>
<!-- Entry point: weaves the fault-injection aspects found under
${test.src.dir}/aop into ${build-fi.dir}/classes. -->
<target name="injectfaults"
description="Instrument classes with faults and other AOP advices">
<!--mkdir to prevent <subant> failure in case the folder has been removed-->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
<weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
src.dir="${test.src.dir}/aop">
</weave-injectfault-aspects>
</target>
<!-- =============================================================== -->
<!-- Create hadoop-{version}-dev-common.jar required to be deployed on -->
<!-- cluster for system tests -->
<!-- =============================================================== -->
<target name="jar-system"
depends="inject-system-faults"
description="make hadoop.jar">
<macro-jar-fault-inject target.name="jar"
build.dir="${system-test-build-dir}"
jar.final.name="final.name"
jar.final.value="${herriot.final.name}">
</macro-jar-fault-inject>
<jar jarfile="${system-test-build-dir}/${herriot.final.name}-sources.jar"
update="yes">
<fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java"/>
<fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj"/>
</jar>
</target>
<!-- Runs the -compile-fault-inject target in a sub-build, capturing its
output to ${compile-inject.output} so the post-weave warning scan in
that target can inspect it. -->
<macrodef name="weave-injectfault-aspects">
<attribute name="dest.dir" />
<attribute name="src.dir" />
<sequential>
<subant buildpath="build.xml" target="-compile-fault-inject"
output="${compile-inject.output}">
<property name="build.dir" value="${build-fi.dir}" />
<property name="src.dir.path" value="@{src.dir}" />
<property name="dest.dir" value="@{dest.dir}" />
</subant>
</sequential>
</macrodef>
<!-- NOTE(review): build-fi.dir is already defined near the top of this
file; Ant properties are immutable, so the re-definition below appears
to be a no-op in this project. Verify intent. -->
<target name="inject-system-faults" description="Inject system faults">
<property name="build-fi.dir" value="${system-test-build-dir}" />
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
<weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop">
</weave-injectfault-aspects>
</target>
<!-- Runs the given test target in a sub-build against the fault-injected
tree, restricting the run to TestFi* test cases. -->
<macrodef name="macro-run-tests-fault-inject">
<attribute name="target.name" />
<attribute name="testcasesonly" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="test.fault.inject" value="yes"/>
<property name="test.include" value="TestFi*"/>
<!-- This one is needed for the special "regression" target only -->
<property name="special.fi.testcasesonly" value="@{testcasesonly}"/>
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make hadoop-fi.jar including all Fault injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
<attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make test jar files including all Fault Injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-test-fault-inject">
<attribute name="target.name" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="@{jar.final.name}"
value="@{jar.final.value}"/>
</subant>
</sequential>
</macrodef>
<!-- End of Fault Injection (FI) related section -->
<!-- Start of cluster controller binary target -->
<property name="runAs.src"
value ="${test.src.dir}/system/c++/runAs"/>
<property name="runAs.build.dir"
value="${system-test-build-dir}/c++-build"/>
<property name="runAs.configure.script"
value="${runAs.build.dir}/configure"/>
<!-- The $${...} form is Ant's escape for a literal ${...} string, so the
inequality below holds only when run-as.hadoop.home.dir was actually
set by the caller. -->
<target name="init-runAs-build">
<condition property="runAs.parameters.passed">
<not>
<equals arg1="${run-as.hadoop.home.dir}"
arg2="$${run-as.hadoop.home.dir}"/>
</not>
</condition>
<fail unless="runAs.parameters.passed"
message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
<mkdir dir="${runAs.build.dir}"/>
<copy todir="${runAs.build.dir}" overwrite="true">
<fileset dir="${runAs.src}" includes="**/*"/>
</copy>
<chmod perm="+x" file="${runAs.configure.script}">
</chmod>
</target>
<target name="configure-runAs"
depends="init-runAs-build">
<exec executable="${runAs.configure.script}"
dir="${runAs.build.dir}" failonerror="true">
<arg value="--with-home=${run-as.hadoop.home.dir}"/>
</exec>
</target>
<target name="run-as" depends="configure-runAs">
<exec executable="${make.cmd}" dir="${runAs.build.dir}"
searchpath="yes" failonerror="yes">
<arg value="all" />
</exec>
</target>
<!-- End of cluster controller binary target -->
<!-- Maven: install the instrumented (Herriot) jar into the local repo -->
<target name="-mvn-system-install" depends="mvn-taskdef, jar-system">
<artifact:pom file="${hadoop-common-instrumented.pom}" id="hadoop.core.${herriot.suffix}"/>
<artifact:install file="${hadoop-common-instrumented.jar}">
<pom refid="hadoop.core.${herriot.suffix}"/>
<attach file="${hadoop-common-instrumented-sources.jar}" classifier="sources" />
</artifact:install>
</target>
<!-- -mvn-system-deploy target is no more called from the root
build.xml -->
<target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system">
<artifact:pom file="${hadoop-common-instrumented.pom}"
id="hadoop.core.${herriot.suffix}"/>
<artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
<artifact:deploy file="${hadoop-common-instrumented.jar}">
<remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
<pom refid="hadoop.core.${herriot.suffix}"/>
<attach file="${hadoop-common-instrumented-sources.jar}" classifier="sources"/>
</artifact:deploy>
</target>
<!-- End of Maven -->
</project>

View File

@ -1,349 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="aspects"
xmlns:artifact="urn:maven-artifact-ant">
<!-- The following are duplications and have to be customized elsewhere too -->
<!-- TODO this version has to be updated synchronously with Ivy -->
<property name="aspectversion" value="1.6.5"/>
<!-- TODO this has to be changed synchronously with build.xml version prop.-->
<!-- this workarounds of test-patch setting its own 'version' -->
<property name="project.version" value="2.0.0-SNAPSHOT"/>
<!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
<property file="${basedir}/build.properties"/>
<!-- Properties related to system fault injection and tests -->
<property name="system-test-build-dir" value="${build-fi.dir}/system"/>
<!-- This variable is set by respective injection targets -->
<property name="hadoop.instrumented.jar" value=""/>
<!-- Properties specifically for system fault-injections and system tests -->
<property name="herriot.suffix" value="instrumented"/>
<property name="instrumented.final.name"
value="${name}-${herriot.suffix}-${version}"/>
<property name="hadoop-hdfs-instrumented.pom"
location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}.xml" />
<property name="hadoop-hdfs-instrumented-test.pom"
location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}-test.xml" />
<property name="hadoop-hdfs-instrumented.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}.jar" />
<property name="hadoop-hdfs-instrumented-sources.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}-sources.jar" />
<property name="hadoop-hdfs-instrumented-test.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar" />
<property name="hadoop-hdfs-instrumented-test-sources.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar" />
<!-- All Fault Injection (FI) related targets are located in this section -->
<target name="clean-fi">
<delete dir="${build-fi.dir}"/>
</target>
<!-- Weaving aspects in place
Later on one can run 'ant jar-fault-inject' to create
Hadoop jar file with instrumented classes
-->
<!-- Target -classes-compilation has to be defined in build.xml and
needs to depend on classes compilation and test classes compilation
targets. This is a poor man parametrization for targets -->
<target name="-compile-fault-inject" depends="-classes-compilation" >
<!-- AspectJ task definition -->
<taskdef
resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
<classpath>
<pathelement
location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
</classpath>
</taskdef>
<echo message="Start weaving aspects in place"/>
<!-- aspect.path carries the jar the aspects are woven against; the jar
location is supplied per invocation via hadoop.instrumented.jar
(set by the weave-injectfault-aspects macro below). -->
<path id="aspect.path">
<pathelement location="${hadoop.instrumented.jar}"/>
</path>
<iajc
encoding="${build.encoding}"
srcdir="${java.src.dir};${build.src};${src.dir.path}"
includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
excludes="org/apache/hadoop/classification/tools/**/*, org/apache/hadoop/record/**/*"
destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}"
fork="true"
maxmem="256m">
<aspectpath refid="aspect.path"/>
<classpath refid="test.classpath"/>
</iajc>
<!-- Scan the captured weaving log for lines matching "iajc.*warning";
any match indicates an advice failed to bind, and the build is
failed immediately below. -->
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
<filterchain>
<linecontainsregexp>
<regexp pattern='iajc.*warning'/>
</linecontainsregexp>
</filterchain>
</loadfile>
<fail if="injection.failure">
Broken binding of advises: ${line.separator}${injection.failure}
</fail>
<echo message="Weaving of aspects is finished"/>
</target>
<!-- Classpath for running system tests -->
<path id="test.system.classpath">
<pathelement location="${hadoop.conf.dir.deployed}" />
<pathelement location="${system-test-build-dir}/test/extraconf" />
<pathelement location="${system-test-build-dir}/test/classes" />
<pathelement location="${system-test-build-dir}/classes" />
<pathelement location="${test.src.dir}" />
<pathelement location="${build-fi.dir}" />
<pathelement location="${build-fi.dir}/tools" />
<pathelement path="${clover.jar}" />
<fileset dir="${system-test-build-dir}">
<include name="**/*.jar" />
<exclude name="**/excluded/" />
</fileset>
<path refid="classpath" />
</path>
<!-- ================ -->
<!-- run system tests -->
<!-- ================ -->
<target name="test-system" depends="init, ivy-retrieve-system"
description="Run system tests">
<subant buildpath="build.xml" target="jar-test-system"/>
<macro-test-runner test.file="${test.hdfs.all.tests.file}"
suite.type="system/test"
classpath="test.system.classpath"
test.dir="${system-test-build-dir}/test"
fileset.dir="${test.src.dir}"
hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
</macro-test-runner>
</target>
<!-- Entry point: weaves the fault-injection aspects found under
${test.src.dir}/aop into ${build-fi.dir}/classes. -->
<target name="injectfaults"
description="Instrument classes with faults and other AOP advices">
<!--mkdir to prevent <subant> failure in case the folder has been removed-->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
<weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
src.dir="${test.src.dir}/aop"
aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/common/hadoop-common-${project.version}.jar">
</weave-injectfault-aspects>
</target>
<!-- =============================================================== -->
<!-- Create hadoop-{version}-dev-core.jar required to be deployed on -->
<!-- cluster for system tests -->
<!-- =============================================================== -->
<target name="jar-system"
depends="inject-system-faults"
description="Make hadoop-hdfs-instrumented.jar with system injections.">
<macro-jar-fault-inject target.name="jar"
build.dir="${system-test-build-dir}"
jar.final.name="final.name"
jar.final.value="${instrumented.final.name}">
</macro-jar-fault-inject>
<jar jarfile="${system-test-build-dir}/${instrumented.final.name}-sources.jar"
update="yes">
<fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java" />
<fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj" />
</jar>
</target>
<target name="jar-test-system" depends="inject-system-faults, compile-test-system"
description="Make hadoop-hdfs-instrumented-test.jar with system injections.">
<subant buildpath="build.xml" target="-do-jar-test">
<property name="build.dir" value="${system-test-build-dir}"/>
<property name="test.hdfs.final.name" value="${name}-${herriot.suffix}-test-${version}"/>
<property name="test.hdfs.build.classes"
value="${system-test-build-dir}/test/classes"/>
</subant>
<jar jarfile="${hadoop-hdfs-instrumented-test-sources.jar}">
<fileset dir="${test.src.dir}/system/test" includes="org/apache/hadoop/**/*.java" />
</jar>
</target>
<target name="compile-test-system" description="Compiles system tests">
<subant buildpath="build.xml" target="-compile-test-system.wrapper">
<property name="build.dir" value="${system-test-build-dir}"/>
</subant>
</target>
<target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-hdfs, ivy-retrieve-system">
<macro-compile-hdfs-test
target.dir="${system-test-build-dir}/test/classes"
source.dir="${test.src.dir}/system/test"
dest.dir="${system-test-build-dir}/test/classes"
classpath="test.system.classpath"/>
</target>
<!-- Runs the -compile-fault-inject target in a sub-build, capturing its
output to ${compile-inject.output} so the post-weave warning scan in
that target can inspect it. The aspects.jar attribute is forwarded as
the hadoop.instrumented.jar property consumed by aspect.path. -->
<macrodef name="weave-injectfault-aspects">
<attribute name="dest.dir" />
<attribute name="src.dir" />
<attribute name="aspects.jar"/>
<attribute name="base.build.dir" default="${build-fi.dir}"/>
<sequential>
<subant buildpath="build.xml" target="-compile-fault-inject"
output="${compile-inject.output}">
<property name="build.dir" value="@{base.build.dir}" />
<property name="src.dir.path" value="@{src.dir}" />
<property name="dest.dir" value="@{dest.dir}" />
<property name="hadoop.instrumented.jar" value="@{aspects.jar}"/>
</subant>
</sequential>
</macrodef>
<target name="inject-system-faults"
description="Inject system faults">
<mkdir dir="${system-test-build-dir}"/>
<delete file="${compile-inject.output}"/>
<subant buildpath="build.xml" target="ivy-retrieve-system">
<property name="build.dir" value="${system-test-build-dir}"/>
</subant>
<weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
aspects.jar="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar"
base.build.dir="${system-test-build-dir}">
</weave-injectfault-aspects>
</target>
<!-- Runs the given test target in a sub-build against the fault-injected
tree, restricting the run to TestFi* test cases. -->
<macrodef name="macro-run-tests-fault-inject">
<attribute name="target.name" />
<attribute name="testcasesonly" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="test.fault.inject" value="yes"/>
<property name="test.include" value="TestFi*"/>
<!-- This one is needed for the special "regression" target only -->
<property name="special.fi.testcasesonly" value="@{testcasesonly}"/>
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make hadoop-fi.jar including all Fault injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
<attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make test jar files including all Fault Injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-test-fault-inject">
<attribute name="target.name" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="@{jar.final.name}"
value="@{jar.final.value}"/>
</subant>
</sequential>
</macrodef>
<!-- End of Fault Injection (FI) related section -->
<!-- Start of cluster controller binary target -->
<property name="runAs.src"
value ="${test.src.dir}/system/c++/runAs"/>
<property name="runAs.build.dir"
value="${system-test-build-dir}/c++-build"/>
<property name="runAs.configure.script"
value="${runAs.build.dir}/configure"/>
<!-- The $${...} form is Ant's escape for a literal ${...} string, so the
inequality below holds only when run-as.hadoop.home.dir was actually
set by the caller. -->
<target name="init-runAs-build">
<condition property="runAs.parameters.passed">
<not>
<equals arg1="${run-as.hadoop.home.dir}"
arg2="$${run-as.hadoop.home.dir}"/>
</not>
</condition>
<fail unless="runAs.parameters.passed"
message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
<mkdir dir="${runAs.build.dir}"/>
<copy todir="${runAs.build.dir}" overwrite="true">
<fileset dir="${runAs.src}" includes="**/*"/>
</copy>
<chmod perm="+x" file="${runAs.configure.script}">
</chmod>
</target>
<target name="configure-runAs"
depends="init-runAs-build">
<exec executable="${runAs.configure.script}"
dir="${runAs.build.dir}" failonerror="true">
<arg value="--with-home=${run-as.hadoop.home.dir}"/>
</exec>
</target>
<target name="run-as" depends="configure-runAs">
<exec executable="${make.cmd}" dir="${runAs.build.dir}"
searchpath="yes" failonerror="yes">
<arg value="all" />
</exec>
</target>
<!-- End of cluster controller binary target -->
<!-- Install Herriot artifacts to the local Maven -->
<target name="-mvn-system-install" depends="mvn-taskdef, jar-system, jar-test-system">
<artifact:pom file="${hadoop-hdfs-instrumented.pom}"
id="hadoop.hdfs.${herriot.suffix}"/>
<artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"
id="hadoop.hdfs.${herriot.suffix}.test"/>
<artifact:install file="${hadoop-hdfs-instrumented.jar}">
<pom refid="hadoop.hdfs.${herriot.suffix}"/>
<attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
</artifact:install>
<artifact:install file="${hadoop-hdfs-instrumented-test.jar}">
<pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
<attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
</artifact:install>
</target>
<!-- Deploy Herriot artifacts (main and test jars with sources) to the
Apache snapshot repository. -->
<target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system, jar-test-system">
<property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
<artifact:pom file="${hadoop-hdfs-instrumented.pom}"
id="hadoop.hdfs.${herriot.suffix}"/>
<artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"
id="hadoop.hdfs.${herriot.suffix}.test"/>
<artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
<artifact:deploy file="${hadoop-hdfs-instrumented.jar}">
<remoteRepository id="apache.snapshots.https" url="${repourl}"/>
<pom refid="hadoop.hdfs.${herriot.suffix}"/>
<attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
</artifact:deploy>
<artifact:deploy file="${hadoop-hdfs-instrumented-test.jar}">
<remoteRepository id="apache.snapshots.https" url="${repourl}"/>
<pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
<attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
</artifact:deploy>
</target>
<!-- End of Maven -->
</project>

View File

@ -31,7 +31,6 @@
<property name="module" value="mapreduce"/> <property name="module" value="mapreduce"/>
<property name="Name" value="Hadoop-Mapred"/> <property name="Name" value="Hadoop-Mapred"/>
<property name="name" value="hadoop-${module}"/> <property name="name" value="hadoop-${module}"/>
<!-- Need to change aop.xml project.version prop. synchronously -->
<property name="_version" value="2.0.1"/> <property name="_version" value="2.0.1"/>
<property name="version" value="${_version}-SNAPSHOT"/> <property name="version" value="${_version}-SNAPSHOT"/>
<property name="final.name" value="${name}-${version}"/> <property name="final.name" value="${name}-${version}"/>
@ -204,7 +203,6 @@
<property name="hadoop-mapred-test.jar" location="${build.dir}/${test.final.name}.jar" /> <property name="hadoop-mapred-test.jar" location="${build.dir}/${test.final.name}.jar" />
<property name="hadoop-mapred-examples.jar" location="${build.dir}/${examples.final.name}.jar" /> <property name="hadoop-mapred-examples.jar" location="${build.dir}/${examples.final.name}.jar" />
<property name="hadoop-mapred-tools.jar" location="${build.dir}/${tools.final.name}.jar" /> <property name="hadoop-mapred-tools.jar" location="${build.dir}/${tools.final.name}.jar" />
<property name="hadoop-mapred-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="hadoop-mapred-sources.jar" location="${build.dir}/${final.name}-sources.jar" /> <property name="hadoop-mapred-sources.jar" location="${build.dir}/${final.name}-sources.jar" />
<property name="hadoop-mapred-test-sources.jar" location="${build.dir}/${test.final.name}-sources.jar" /> <property name="hadoop-mapred-test-sources.jar" location="${build.dir}/${test.final.name}-sources.jar" />
<property name="hadoop-mapred-examples-sources.jar" location="${build.dir}/${examples.final.name}-sources.jar" /> <property name="hadoop-mapred-examples-sources.jar" location="${build.dir}/${examples.final.name}-sources.jar" />
@ -661,17 +659,11 @@
<propertyref name="compile.c++"/> <propertyref name="compile.c++"/>
</syspropertyset> </syspropertyset>
<!-- Pass probability specifications to the spawn JVM -->
<syspropertyset id="FaultProbabilityProperties">
<propertyref regex="fi.*"/>
</syspropertyset>
<sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
value="@{hadoop.conf.dir.deployed}" />
<classpath refid="@{classpath}"/> <classpath refid="@{classpath}"/>
<formatter type="${test.junit.output.format}" /> <formatter type="${test.junit.output.format}" />
<batchtest todir="@{test.dir}" unless="testcase"> <batchtest todir="@{test.dir}" unless="testcase">
<fileset dir="@{fileset.dir}/@{suite.type}" <fileset dir="@{fileset.dir}/@{suite.type}"
excludes="**/${test.exclude}.java aop/** system/**"> excludes="**/${test.exclude}.java">
<patternset> <patternset>
<includesfile name="@{test.file}"/> <includesfile name="@{test.file}"/>
<excludesfile name="${test.exclude.file}" /> <excludesfile name="${test.exclude.file}" />
@ -682,26 +674,6 @@
<batchtest todir="@{test.dir}" if="testcase"> <batchtest todir="@{test.dir}" if="testcase">
<fileset dir="@{fileset.dir}/mapred" includes="**/${testcase}.java"/> <fileset dir="@{fileset.dir}/mapred" includes="**/${testcase}.java"/>
<fileset dir="@{fileset.dir}/unit" includes="**/${testcase}.java"/> <fileset dir="@{fileset.dir}/unit" includes="**/${testcase}.java"/>
<fileset dir="@{fileset.dir}/system/test" includes="**/${testcase}.java"/>
</batchtest>
<!--batch test to test all the testcases in aop folder with fault
injection-->
<batchtest todir="@{test.dir}" if="tests.notestcase.fi">
<fileset dir="@{fileset.dir}/aop"
includes="**/${test.include}.java"
excludes="**/${test.exclude}.java"
excludesfile="${test.exclude.file}" />
</batchtest>
<!-- batch test for testing a single test case in aop folder with
fault injection-->
<batchtest todir="@{test.dir}" if="tests.testcase.fi">
<fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java"/>
</batchtest>
<!--The following batch is for very special occasions only when
a non-FI tests are needed to be executed against FI-environment -->
<batchtest todir="@{test.dir}" if="tests.testcaseonly.fi">
<fileset dir="@{fileset.dir}/mapred"
includes="**/${testcase}.java"/>
</batchtest> </batchtest>
</junit> </junit>
<antcall target="checkfailure"/> <antcall target="checkfailure"/>
@ -710,7 +682,7 @@
<target name="run-test-mapred" depends="run-commit-test, run-smoke-test, <target name="run-test-mapred" depends="run-commit-test, run-smoke-test,
run-test-mapred-excluding-commit-and-smoke, run-test-mapred-all-withtestcaseonly" run-test-mapred-excluding-commit-and-smoke, run-test-mapred-all-withtestcaseonly"
description="Run mapred functional and system tests"> description="Run mapred functional tests">
</target> </target>
<target name="run-test-mapred-all-withtestcaseonly" <target name="run-test-mapred-all-withtestcaseonly"
@ -1192,7 +1164,7 @@
<delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-test"/> <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-test"/>
</target> </target>
<target name="mvn-install" depends="mvn-taskdef,jar,jar-test,set-version,-mvn-system-install" <target name="mvn-install" depends="mvn-taskdef,jar,jar-test,set-version"
description="To install hadoop mapreduce and test jars to local filesystem's m2 cache"> description="To install hadoop mapreduce and test jars to local filesystem's m2 cache">
<artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/> <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
<artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/> <artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
@ -1207,7 +1179,7 @@
</target> </target>
<target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test, <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test,
jar-system, jar-test-system, set-version, signanddeploy, simpledeploy" set-version, signanddeploy, simpledeploy"
description="To deploy hadoop mapredice and test jar's to apache description="To deploy hadoop mapredice and test jar's to apache
snapshot's repository"/> snapshot's repository"/>
@ -1400,7 +1372,7 @@
<!-- ================================================================== --> <!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories --> <!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== --> <!-- ================================================================== -->
<target name="clean" depends="clean-contrib, clean-fi, clean-sign" description="Clean. Delete the build files, and their directories"> <target name="clean" depends="clean-contrib, clean-sign" description="Clean. Delete the build files, and their directories">
<delete dir="${build.dir}"/> <delete dir="${build.dir}"/>
<delete dir="${docs.src}/build"/> <delete dir="${docs.src}/build"/>
<delete file="${hadoop-mapred.pom}"/> <delete file="${hadoop-mapred.pom}"/>
@ -1721,8 +1693,6 @@
output="${build.dir.eclipse-main-generated-classes}" /> output="${build.dir.eclipse-main-generated-classes}" />
<source path="${test.src.dir}/mapred" <source path="${test.src.dir}/mapred"
output="${build.dir.eclipse-test-classes}" /> output="${build.dir.eclipse-test-classes}" />
<source path="${test.src.dir}/aop"
output="${build.dir.eclipse-test-classes}" />
<source path="${test.src.dir}/unit" <source path="${test.src.dir}/unit"
output="${build.dir.eclipse-test-classes}" /> output="${build.dir.eclipse-test-classes}" />
<source path="${examples.dir}" <source path="${examples.dir}"
@ -1864,11 +1834,6 @@
log="${ivyresolvelog}"/> log="${ivyresolvelog}"/>
</target> </target>
<target name="ivy-resolve-system" depends="ivy-init">
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="system"
log="${ivyresolvelog}"/>
</target>
<target name="ivy-retrieve" depends="ivy-resolve" <target name="ivy-retrieve" depends="ivy-resolve"
description="Retrieve Ivy-managed artifacts"> description="Retrieve Ivy-managed artifacts">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
@ -1940,14 +1905,6 @@
<ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/> <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
</target> </target>
<target name="ivy-retrieve-system" depends="ivy-resolve-system"
description="Retrieve Ivy-managed artifacts for the system tests">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyresolvelog}"/>
<ivy:cachepath pathid="ivy-system.classpath" conf="system"/>
</target>
<target name="ivy-report" depends="ivy-resolve-releaseaudit" <target name="ivy-report" depends="ivy-resolve-releaseaudit"
description="Generate"> description="Generate">
<ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/> <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
@ -1956,85 +1913,4 @@
</echo> </echo>
</target> </target>
<!-- Begining of fault-injection targets-->
<import file="${test.src.dir}/aop/build/aop.xml"/>
<!-- declaring mapred.src.dir as java.src.dir for aop.xml -->
<property name="java.src.dir" value="${src.dir}/java"/>
<!-- target dependency from aop.xml -->
<target name="-classes-compilation"
depends="compile-mapred-classes, compile-mapred-test"/>
<target name="jar-test-fault-inject" depends="jar-mapred-test-fault-inject"
description="Make hadoop-mapred-test-fi.jar files"/>
<!-- target to build test-fi.jar-->
<target name="jar-mapred-test-fault-inject" depends="injectfaults"
description="Make hadoop-mapred-test-fi.jar">
<macro-jar-test-fault-inject target.name="jar-test"
jar.final.name="test.final.name"
jar.final.value="${name}-test-${version}-fi"/>
</target>
<!-- target to build the hadoop-fi.jar -->
<target name="jar-fault-inject" depends="injectfaults"
description="Make hadoop-fi.jar">
<macro-jar-fault-inject
target.name="jar"
build.dir="${build-fi.dir}"
jar.final.name="final.name"
jar.final.value="${final.name}-fi" />
</target>
<!-- target to run fault injected test cases will run entire mapred test
suite-->
<target name="run-test-mapred-fault-inject" depends="injectfaults"
description="Run full suite of unit tests with fault injection">
<macro-run-tests-fault-inject target.name="run-test-mapred"
testcasesonly="false"/>
</target>
<!-- target to run non-FI tests in a FI environment-->
<target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
<fail unless="testcase">
Can't run this target without -Dtestcase setting!
</fail>
<macro-run-tests-fault-inject target.name="run-test-mapred"
testcasesonly="true"/>
</target>
<condition property="tests.notestcase">
<and>
<isfalse value="${test.fault.inject}"/>
<not>
<isset property="testcase"/>
</not>
</and>
</condition>
<condition property="tests.notestcase.fi">
<and>
<not>
<isset property="testcase"/>
</not>
<istrue value="${test.fault.inject}"/>
</and>
</condition>
<condition property="test.testcase">
<and>
<isfalse value="${test.fault.inject}"/>
<isset property="testcase"/>
</and>
</condition>
<condition property="tests.testcaseonly.fi">
<istrue value="${special.fi.testcasesonly}" />
</condition>
<condition property="tests.testcase.fi">
<and>
<istrue value="${test.fault.inject}" />
<isset property="testcase" />
<isfalse value="${special.fi.testcasesonly}" />
</and>
</condition>
<!-- End of fault injection targets-->
</project> </project>

View File

@ -1,370 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="aspects"
xmlns:artifact="urn:maven-artifact-ant">
<!-- The followng are duplications and have to be customized elsewhere too -->
<!-- TODO this version has to be updated synchronously with Ivy -->
<property name="aspectversion" value="1.6.5"/>
<!-- TODO this has to be changed synchronously with build.xml version prop.-->
<!-- this workarounds of test-patch setting its own 'version' -->
<property name="project.version" value="2.0.0-SNAPSHOT"/>
<!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
<property file="${basedir}/build.properties"/>
<!-- Properties related to system fault injection and tests -->
<property name="herriot.suffix" value="instrumented"/>
<property name="system-test-build-dir" value="${build-fi.dir}/system"/>
<!-- This varialbe is set by respective injection targets -->
<property name="hadoop.instrumented.jars" value=""/>
<!-- Properties specifically for system fault-injections and system tests -->
<property name="herriot.suffix" value="instrumented"/>
<property name="instrumented.final.name"
value="${name}-${herriot.suffix}-${version}"/>
<property name="hadoop-mapred-instrumented.pom"
location="${ivy.dir}/hadoop-mapred-${herriot.suffix}.xml" />
<property name="hadoop-mapred-instrumented-test.pom"
location="${ivy.dir}/hadoop-mapred-${herriot.suffix}-test.xml" />
<property name="hadoop-mapred-instrumented.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}.jar" />
<property name="hadoop-mapred-instrumented-sources.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}-sources.jar" />
<property name="hadoop-mapred-instrumented-test.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar" />
<property name="hadoop-mapred-instrumented-test-sources.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar" />
<!--All Fault Injection (FI) related targets are located in this session -->
<target name="clean-fi">
<delete dir="${build-fi.dir}"/>
</target>
<!-- Weaving aspects in place
Later on one can run 'ant jar-fault-inject' to create
Hadoop jar file with instrumented classes
-->
<!-- Target -classes-compilation has to be defined in build.xml and
needs to depend on classes compilation and test classes compilation
targets. This is a poor man parametrization for targets -->
<target name="-compile-fault-inject" depends="-classes-compilation" >
<!-- AspectJ task definition -->
<taskdef
resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
<classpath>
<pathelement
location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
</classpath>
</taskdef>
<echo message="Start weaving aspects in place"/>
<path id="aspect.path">
<pathelement location="${hadoop.instrumented.jars}"/>
</path>
<iajc
encoding="${build.encoding}"
srcdir="${java.src.dir};${build.src};${src.dir.path}"
includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
excludes="org/apache/hadoop/record/**/*"
destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}"
fork="true"
maxmem="256m">
<aspectpath refid="aspect.path"/>
<classpath refid="test.classpath"/>
</iajc>
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
<filterchain>
<linecontainsregexp>
<regexp pattern='iajc.*warning'/>
</linecontainsregexp>
</filterchain>
</loadfile>
<fail if="injection.failure">
Broken binding of advises: ${line.separator}${injection.failure}
</fail>
<echo message="Weaving of aspects is finished"/>
</target>
<!-- Classpath for running system tests -->
<path id="test.system.classpath">
<pathelement location="${hadoop.conf.dir.deployed}" />
<pathelement location="${system-test-build-dir}/test/extraconf" />
<pathelement location="${system-test-build-dir}/test/classes" />
<pathelement location="${system-test-build-dir}/classes" />
<pathelement location="${system-test-build-dir}/test/mapred/classes"/>
<pathelement location="${system-test-build-dir}/tools" />
<pathelement location="${system-test-build-dir}" />
<pathelement location="${test.src.dir}" />
<pathelement path="${clover.jar}" />
<fileset dir="${system-test-build-dir}">
<include name="**/*.jar" />
<exclude name="**/excluded/" />
</fileset>
<path refid="classpath" />
</path>
<!-- compile system tests... -->
<path id="test.system.compile.classpath">
<pathelement location="${system-test-build-dir}/test/extraconf" />
<pathelement location="${system-test-build-dir}/test/classes" />
<pathelement location="${system-test-build-dir}/classes" />
<pathelement location="${system-test-build-dir}/test/mapred/classes" />
<pathelement location="${system-test-build-dir}/test/mapred/testjar" />
<pathelement location="${system-test-build-dir}/tools" />
<pathelement location="${system-test-build-dir}/examples" />
<path refid="ivy-system.classpath"/>
</path>
<!-- ================ -->
<!-- run system tests -->
<!-- ================ -->
<target name="test-system" depends="ivy-retrieve-common, ivy-retrieve-system"
description="Run system tests">
<subant buildpath="build.xml" target="jar-test-system"/>
<subant target="test-system-contrib">
<property name="hadoop.home" value="${hadoop.home}"/>
<property name="hadoop.conf.dir" value="${hadoop.conf.dir}"/>
<property name="version" value="${version}"/>
<property name="hadoop.conf.dir.deployed"
value="${hadoop.conf.dir.deployed}"/>
<fileset file="${contrib.dir}/build.xml"/>
</subant>
<macro-test-runner test.file="${test.mapred.all.tests.file}"
suite.type="system/test"
classpath="test.system.classpath"
test.dir="${system-test-build-dir}/test"
fileset.dir="${test.src.dir}"
hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
</macro-test-runner>
</target>
<target name="injectfaults"
description="Instrument classes with faults and other AOP advices">
<!--mkdir to prevent <subant> failure in case the folder has been removed-->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
<weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
src.dir="${test.src.dir}/aop"
aspects.jars="${build-fi.dir}/ivy/lib/${ant.project.name}/test/hadoop-common-${project.version}.jar">
</weave-injectfault-aspects>
</target>
<!-- =============================================================== -->
<!-- Create hadoop-{version}-dev-core.jar required to be deployed on -->
<!-- cluster for system tests -->
<!-- =============================================================== -->
<target name="jar-system"
depends="inject-system-faults"
description="Make hadoop-mapred-instrumented.jar with system injections.">
<macro-jar-fault-inject target.name="jar"
build.dir="${system-test-build-dir}"
jar.final.name="final.name"
jar.final.value="${instrumented.final.name}">
</macro-jar-fault-inject>
<jar jarfile="${system-test-build-dir}/${instrumented.final.name}-sources.jar"
update="yes">
<fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java" />
<fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj" />
</jar>
</target>
<target name="jar-test-system" depends="inject-system-faults, compile-test-system"
description="Make hadoop-mapred-instrumented-test.jar with system injections.">
<subant buildpath="build.xml" target="-do-jar-test">
<property name="build.dir" value="${system-test-build-dir}"/>
<property name="test.final.name"
value="${name}-${herriot.suffix}-test-${project.version}"/>
<property name="test.build.classes"
value="${system-test-build-dir}/test/classes"/>
</subant>
<jar jarfile="${hadoop-mapred-instrumented-test-sources.jar}">
<fileset dir="${test.src.dir}/system/test" includes="org/apache/hadoop/**/*.java" />
</jar>
</target>
<target name="compile-test-system" description="Compiles system tests">
<subant buildpath="build.xml" target="-compile-test-system.wrapper">
<property name="build.dir" value="${system-test-build-dir}"/>
</subant>
</target>
<target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-system">
<macro-compile-test
source.dir="${test.src.dir}/system/test"
dest.dir="${system-test-build-dir}/test/classes"
classpath="test.system.compile.classpath"/>
</target>
<macrodef name="weave-injectfault-aspects">
<attribute name="dest.dir" />
<attribute name="src.dir" />
<attribute name="aspects.jars"/>
<attribute name="base.build.dir" default="${build-fi.dir}"/>
<sequential>
<subant buildpath="build.xml" target="-compile-fault-inject"
output="${compile-inject.output}">
<property name="build.dir" value="@{base.build.dir}" />
<property name="src.dir.path" value="@{src.dir}" />
<property name="dest.dir" value="@{dest.dir}" />
<property name="hadoop.instrumented.jars" value="@{aspects.jars}"/>
</subant>
</sequential>
</macrodef>
<target name="inject-system-faults"
description="Inject system faults">
<mkdir dir="${system-test-build-dir}"/>
<delete file="${compile-inject.output}"/>
<subant buildpath="build.xml" target="ivy-retrieve-system">
<property name="build.dir" value="${system-test-build-dir}"/>
</subant>
<weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
aspects.jars="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar"
base.build.dir="${system-test-build-dir}">
</weave-injectfault-aspects>
</target>
<macrodef name="macro-run-tests-fault-inject">
<attribute name="target.name" />
<attribute name="testcasesonly" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="test.fault.inject" value="yes"/>
<property name="test.include" value="TestFi*"/>
<!-- This one is needed for the special "regression" target only -->
<property name="special.fi.testcasesonly" value="@{testcasesonly}"/>
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make hadoop-fi.jar including all Fault injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
<attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make test jar files including all Fault Injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-test-fault-inject">
<attribute name="target.name" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="@{jar.final.name}"
value="@{jar.final.value}"/>
</subant>
</sequential>
</macrodef>
<!--End of Fault Injection (FI) related session-->
<!-- Start of cluster controller binary target -->
<property name="runAs.src"
value ="${test.src.dir}/system/c++/runAs"/>
<property name="runAs.build.dir"
value="${system-test-build-dir}/c++-build"/>
<property name="runAs.configure.script"
value="${runAs.build.dir}/configure"/>
<target name="init-runAs-build">
<condition property="runAs.parameters.passed">
<not>
<equals arg1="${run-as.hadoop.home.dir}"
arg2="$${run-as.hadoop.home.dir}"/>
</not>
</condition>
<fail unless="runAs.parameters.passed"
message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
<mkdir dir="${runAs.build.dir}"/>
<copy todir="${runAs.build.dir}" overwrite="true">
<fileset dir="${runAs.src}" includes="**/*"/>
</copy>
<chmod perm="+x" file="${runAs.configure.script}">
</chmod>
</target>
<target name="configure-runAs"
depends="init-runAs-build">
<exec executable="${runAs.configure.script}"
dir="${runAs.build.dir}" failonerror="true">
<arg value="--with-home=${run-as.hadoop.home.dir}"/>
</exec>
</target>
<target name="run-as" depends="configure-runAs">
<exec executable="${make.cmd}" dir="${runAs.build.dir}"
searchpath="yes" failonerror="yes">
<arg value="all" />
</exec>
</target>
<!-- End of cluster controller binary target -->
<!-- Install Herriot artifacts to the local Maven -->
<target name="-mvn-system-install" depends="mvn-taskdef, jar-system, jar-test-system">
<artifact:pom file="${hadoop-mapred-instrumented.pom}"
id="hadoop.mapred.${herriot.suffix}"/>
<artifact:pom file="${hadoop-mapred-instrumented-test.pom}"
id="hadoop.mapred.${herriot.suffix}.test"/>
<artifact:install file="${hadoop-mapred-instrumented.jar}">
<pom refid="hadoop.mapred.${herriot.suffix}"/>
<attach file="${hadoop-mapred-instrumented-sources.jar}" classifier="sources" />
</artifact:install>
<artifact:install file="${hadoop-mapred-instrumented-test.jar}">
<pom refid="hadoop.mapred.${herriot.suffix}.test"/>
<attach file="${hadoop-mapred-instrumented-test-sources.jar}" classifier="sources" />
</artifact:install>
</target>
<target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system, jar-test-system">
<property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
<artifact:pom file="${hadoop-mapred-instrumented.pom}"
id="hadoop.mapred.${herriot.suffix}"/>
<artifact:pom file="${hadoop-mapred-instrumented-test.pom}"
id="hadoop.mapred.${herriot.suffix}.test"/>
<artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
<artifact:deploy file="${hadoop-mapred-instrumented.jar}">
<remoteRepository id="apache.snapshots.https" url="${repourl}"/>
<pom refid="hadoop.mapred.${herriot.suffix}"/>
<attach file="${hadoop-mapred-instrumented-sources.jar}" classifier="sources" />
</artifact:deploy>
<artifact:deploy file="${hadoop-mapred-instrumented-test.jar}">
<remoteRepository id="apache.snapshots.https" url="${repourl}"/>
<pom refid="hadoop.mapred.${herriot.suffix}.test"/>
<attach file="${hadoop-mapred-instrumented-test-sources.jar}" classifier="sources" />
</artifact:deploy>
</target>
<!-- End of Maven -->
</project>