HADOOP-6847. Problem staging 0.21.0 artifacts to Apache Nexus Maven Repository. Contributed by Giridharan Kesavan

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@961559 13f79535-47bb-0310-9956-ffa450edef68
Konstantin Boudnik 2010-07-08 02:29:15 +00:00
parent 2a248dfc32
commit 038d399389
4 changed files with 133 additions and 19 deletions

CHANGES.txt

@@ -1018,6 +1018,9 @@ Release 0.21.0 - Unreleased
   BUG FIXES
 
+    HADOOP-6847. Problem staging 0.21.0 artifacts to Apache Nexus Maven
+    Repository (Giridharan Kesavan via cos)
+
     HADOOP-6748. Removes hadoop.cluster.administrators, cluster administrators
     acl is passed as parameter in constructor. (amareshwari)

build.xml

@@ -133,7 +133,10 @@
   <property name="ivy.repo.dir" value="${user.home}/ivyrepo" />
   <property name="ivy.dir" location="ivy" />
   <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
-  <property name="asfrepo" value="https://repository.apache.org/content/repositories/snapshots"/>
+  <property name="asfrepo" value="https://repository.apache.org"/>
+  <property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
+  <property name="asfstagingrepo"
+    value="${asfrepo}/service/local/staging/deploy/maven2"/>
   <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
   <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
   <property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
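For reference, the two new repository properties resolve by simple expansion of ${asfrepo} to the following URLs (shown here for readability only, not part of the patch):

    asfsnapshotrepo -> https://repository.apache.org/content/repositories/snapshots
    asfstagingrepo  -> https://repository.apache.org/service/local/staging/deploy/maven2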
@@ -188,7 +191,11 @@
     </and>
   </condition>
 
+  <condition property="staging">
+    <equals arg1="${repo}" arg2="staging"/>
+  </condition>
+
   <!-- the normal classpath -->
   <path id="classpath">
     <pathelement location="${build.classes}"/>
     <pathelement location="${conf.dir}"/>
@@ -1222,23 +1229,115 @@
     </artifact:install>
   </target>
 
-  <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test, set-version,
-    -mvn-system-deploy"
-    description="To deploy hadoop common and test jar's to apache snapshot's repository">
+  <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test,
+    jar-system, set-version, signanddeploy, simpledeploy"
+    description="To deploy hadoop common and test jar's to apache
+    snapshot's repository"/>
+
+  <target name="signanddeploy" if="staging" depends="sign">
     <artifact:pom file="${hadoop-common.pom}" id="hadoop.core"/>
     <artifact:pom file="${hadoop-common-test.pom}" id="hadoop.core.test"/>
+    <artifact:pom file="${hadoop-common-instrumented.pom}"
+      id="hadoop.core.${herriot.suffix}"/>
+    <artifact:install-provider artifactId="wagon-http"
+      version="${wagon-http.version}"/>
-    <artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
     <artifact:deploy file="${hadoop-common.jar}">
-      <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
+      <pom refid="hadoop.core"/>
+      <attach file="${hadoop-common.jar}.asc" type="jar.asc"/>
+      <attach file="${hadoop-common.pom}.asc" type="pom.asc"/>
+      <attach file="${hadoop-common-sources.jar}.asc" type="jar.asc" />
+      <attach file="${hadoop-common-sources.jar}" classifier="sources"/>
+    </artifact:deploy>
+    <artifact:deploy file="${hadoop-common-test.jar}">
+      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
+      <pom refid="hadoop.core.test"/>
+      <attach file="${hadoop-common-test.jar}.asc" type="jar.asc"/>
+      <attach file="${hadoop-common-test.pom}.asc" type="pom.asc"/>
+      <attach file="${hadoop-common-test-sources.jar}.asc" type="jar.asc"/>
+      <attach file="${hadoop-common-test-sources.jar}" classifier="sources"/>
+    </artifact:deploy>
+    <artifact:deploy file="${hadoop-common-instrumented.jar}">
+      <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
+      <pom refid="hadoop.core.${herriot.suffix}"/>
+      <attach file="${hadoop-common-instrumented.jar}.asc" type="jar.asc"/>
+      <attach file="${hadoop-common-instrumented.pom}.asc" type="pom.asc"/>
+      <attach file="${hadoop-common-instrumented-sources.jar}.asc"
+        type="jar.asc"/>
+      <attach file="${hadoop-common-instrumented-sources.jar}"
+        classifier="sources"/>
+    </artifact:deploy>
+  </target>
+
+  <target name="sign" depends="clean-sign" if="staging">
+    <input message="password:>" addproperty="gpg.passphrase">
+      <handler classname="org.apache.tools.ant.input.SecureInputHandler" />
+    </input>
+    <macrodef name="sign-artifact" description="Signs the artifact">
+      <attribute name="input.file"/>
+      <attribute name="output.file" default="@{input.file}.asc"/>
+      <attribute name="gpg.passphrase"/>
+      <sequential>
+        <echo>Signing @{input.file} Sig File: @{output.file}</echo>
+        <exec executable="gpg" >
+          <arg value="--armor"/>
+          <arg value="--output"/>
+          <arg value="@{output.file}"/>
+          <arg value="--passphrase"/>
+          <arg value="@{gpg.passphrase}"/>
+          <arg value="--detach-sig"/>
+          <arg value="@{input.file}"/>
+        </exec>
+      </sequential>
+    </macrodef>
+    <sign-artifact input.file="${hadoop-common.jar}"
+      output.file="${hadoop-common.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-test.jar}"
+      output.file="${hadoop-common-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-sources.jar}"
+      output.file="${hadoop-common-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-test-sources.jar}"
+      output.file="${hadoop-common-test-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common.pom}"
+      output.file="${hadoop-common.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-test.pom}"
+      output.file="${hadoop-common-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-instrumented.jar}"
+      output.file="${hadoop-common-instrumented.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-instrumented.pom}"
+      output.file="${hadoop-common-instrumented.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
+    <sign-artifact input.file="${hadoop-common-instrumented-sources.jar}"
+      output.file="${hadoop-common-instrumented-sources.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
+  </target>
+
+  <target name="simpledeploy" unless="staging">
+    <artifact:pom file="${hadoop-common.pom}" id="hadoop.core"/>
+    <artifact:pom file="${hadoop-common-test.pom}" id="hadoop.test"/>
+    <artifact:pom file="${hadoop-common-instrumented.pom}"
+      id="hadoop.core.${herriot.suffix}"/>
+    <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
+    <artifact:deploy file="${hadoop-common.jar}">
+      <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
       <pom refid="hadoop.core"/>
       <attach file="${hadoop-common-sources.jar}" classifier="sources" />
     </artifact:deploy>
     <artifact:deploy file="${hadoop-common-test.jar}">
-      <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+      <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
       <pom refid="hadoop.core.test"/>
       <attach file="${hadoop-common-test-sources.jar}" classifier="sources" />
     </artifact:deploy>
+    <artifact:deploy file="${hadoop-common-instrumented.jar}">
+      <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
+      <pom refid="hadoop.core.${herriot.suffix}"/>
+      <attach file="${hadoop-common-instrumented-sources.jar}" classifier="sources" />
+    </artifact:deploy>
   </target>
 
   <target name="set-version">
@@ -1287,15 +1386,23 @@
   <!-- ================================================================== -->
   <!-- Clean. Delete the build files, and their directories               -->
   <!-- ================================================================== -->
-  <target name="clean" depends="clean-contrib, clean-fi" description="Clean. Delete the build files, and their directories">
+  <target name="clean" depends="clean-contrib, clean-sign, clean-fi" description="Clean. Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
     <delete file="${basedir}/ivy/hadoop-common.xml"/>
+    <delete file="${basedir}/ivy/hadoop-common-pom.xml"/>
     <delete file="${basedir}/ivy/hadoop-common-test.xml"/>
+    <delete file="${basedir}/ivy/hadoop-common-test-pom.xml"/>
     <delete file="${basedir}/ivy/hadoop-common-${herriot.suffix}.xml"/>
     <delete dir="${docs.src}/build"/>
     <delete dir="${src.docs.cn}/build"/>
   </target>
 
+  <target name="clean-sign" description="Clean. Delete .asc files">
+    <delete>
+      <fileset dir="." includes="**/**/*.asc"/>
+    </delete>
+  </target>
+
   <target name="veryclean" depends="clean" description="Delete mvn ant task jar and ivy ant taks jar">
     <delete file="${ant_task.jar}"/>
     <delete file="${ivy.jar}"/>
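Usage note (illustrative, not part of the patch): the staging condition added to build.xml keys off the repo property, while signanddeploy runs only if="staging" and simpledeploy only unless="staging", so the intended invocations would presumably be:

    ant mvn-deploy                  # 'staging' unset: simpledeploy publishes snapshots to ${asfsnapshotrepo}
    ant -Drepo=staging mvn-deploy   # 'staging' set: artifacts are gpg-signed, then pushed to ${asfstagingrepo}

In the staging case, the sign target prompts for the gpg passphrase via its SecureInputHandler before deploying.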

ivy/libraries.properties

@@ -70,6 +70,8 @@ servlet-api.version=2.5
 slf4j-api.version=1.5.11
 slf4j-log4j12.version=1.5.11
 
+wagon-http.version=1.0-beta-2
+
 xmlenc.version=0.52
 
 xerces.version=1.4.4


@@ -29,12 +29,13 @@
   <!-- Properties specifically for system fault-injections and system tests -->
   <property name="herriot.suffix" value="instrumented"/>
+  <property name="herriot.final.name" value="${name}-${herriot.suffix}-${version}"/>
   <property name="hadoop-common-instrumented.pom"
             location="${ivy.dir}/hadoop-common-${herriot.suffix}.xml" />
   <property name="hadoop-common-instrumented.jar"
-            location="${system-test-build-dir}/${final.name}-${herriot.suffix}.jar" />
+            location="${system-test-build-dir}/${herriot.final.name}.jar" />
   <property name="hadoop-common-instrumented-sources.jar"
-            location="${system-test-build-dir}/${final.name}-${herriot.suffix}-sources.jar" />
+            location="${system-test-build-dir}/${herriot.final.name}-sources.jar" />
 
   <!--All Fault Injection (FI) related targets are located in this session -->
@@ -129,9 +130,9 @@
     <macro-jar-fault-inject target.name="jar"
       build.dir="${system-test-build-dir}"
       jar.final.name="final.name"
-      jar.final.value="${final.name}-${herriot.suffix}">
+      jar.final.value="${herriot.final.name}">
     </macro-jar-fault-inject>
-    <jar jarfile="${system-test-build-dir}/${final.name}-${herriot.suffix}-sources.jar"
+    <jar jarfile="${system-test-build-dir}/${herriot.final.name}-sources.jar"
       update="yes">
       <fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java"/>
       <fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj"/>
@@ -258,7 +259,8 @@
       <attach file="${hadoop-common-instrumented-sources.jar}" classifier="sources" />
     </artifact:install>
   </target>
 
+  <!-- -mvn-system-deploy target is no more called from the root build.xml -->
   <target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system">
     <artifact:pom file="${hadoop-common-instrumented.pom}"
       id="hadoop.core.${herriot.suffix}"/>