HDFS-2303. Unbundle jsvc. Contributed by Roman Shaposhnik and Mingjie Lai.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1299963 13f79535-47bb-0310-9956-ffa450edef68
parent 1a75ec8288
commit ab20dae519
@@ -25,6 +25,10 @@
 # The java implementation to use.
 export JAVA_HOME=${JAVA_HOME}
+
+# The jsvc implementation to use. Jsvc is required to run secure datanodes.
+#export JSVC_HOME=${JSVC_HOME}
+
 export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
 
 # Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
 
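With jsvc unbundled, anyone running secure datanodes has to install jsvc themselves and tell Hadoop where it lives. A minimal sketch of the corresponding hadoop-env.sh entry; the /opt/commons-daemon path is an illustrative assumption, not something this commit prescribes:

# Example only: JSVC_HOME must point at the directory that contains the jsvc
# binary; /opt/commons-daemon is a placeholder install location.
export JSVC_HOME=/opt/commons-daemon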
@@ -95,6 +95,8 @@ Release 0.23.3 - UNRELEASED
 
     HDFS-2676. Remove Avro RPC. (suresh)
 
+    HDFS-2303. Unbundle jsvc. (Roman Shaposhnik and Mingjie Lai via eli)
+
   NEW FEATURES
 
     HDFS-2978. The NameNode should expose name dir statuses via JMX. (atm)
@@ -329,49 +329,6 @@
               </tasks>
             </configuration>
           </execution>
-          <execution>
-            <id>xprepare-package-hadoop-daemon</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target unless="windows.build">
-                <condition property="commons.daemon.os.name" value="darwin">
-                  <os name="Mac OS X"/>
-                </condition>
-                <condition property="commons.daemon.os.arch" value="universal">
-                  <os name="Mac OS X"/>
-                </condition>
-                <condition property="commons.daemon.os.name" value="linux">
-                  <os name="Linux" />
-                </condition>
-                <!-- Set commons.daemon.os.arch to either i686 or x86_64 for GNU/Linux -->
-                <condition property="commons.daemon.os.arch" value="x86_64">
-                  <os name="Linux" arch="amd64"/>
-                </condition>
-                <condition property="commons.daemon.os.arch" value="i686">
-                  <os name="Linux" /> <!-- This is a guess -->
-                </condition>
-                <property name="commons.daemon.tar.name"
-                          value="commons-daemon-${commons-daemon.version}-bin-${commons.daemon.os.name}-${commons.daemon.os.arch}.tar.gz"/>
-
-                <mkdir dir="downloads"/>
-                <get src="http://archive.apache.org/dist/commons/daemon/binaries/${commons-daemon.version}/${commons.daemon.os.name}/${commons.daemon.tar.name}"
-                     dest="downloads/${commons.daemon.tar.name}" verbose="true" skipexisting="true"/>
-                <delete dir="${project.build.directory}/commons-daemon.staging"/>
-                <mkdir dir="${project.build.directory}/commons-daemon.staging"/>
-                <untar compression="gzip" src="${basedir}/downloads/${commons.daemon.tar.name}"
-                       dest="${project.build.directory}/commons-daemon.staging"/>
-                <copy file="${project.build.directory}/commons-daemon.staging/jsvc"
-                      todir="${project.build.directory}/${project.artifactId}-${project.version}/libexec"
-                      verbose="true"/>
-                <chmod perm="ugo+x" type="file">
-                  <fileset file="${project.build.directory}/${project.artifactId}-${project.version}/libexec/jsvc"/>
-                </chmod>
-              </target>
-            </configuration>
-          </execution>
         </executions>
       </plugin>
       <plugin>
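The Ant target deleted above used to download a platform-specific commons-daemon binary tarball at build time and ship its jsvc binary under libexec/. After this commit that step becomes a packaging or operations task. A rough shell sketch of the same steps; the version number, platform values, and /opt/commons-daemon install path are placeholders chosen to mirror what the Ant conditions computed, not values mandated by the commit:

# Sketch of the steps the removed Ant target performed; values are examples.
VERSION=1.0.10        # commons-daemon version (the pom used ${commons-daemon.version})
OS=linux              # the Ant conditions chose linux or darwin
ARCH=x86_64           # x86_64, i686, or universal, depending on the platform
TAR=commons-daemon-$VERSION-bin-$OS-$ARCH.tar.gz

mkdir -p downloads
wget -nc -P downloads \
  "http://archive.apache.org/dist/commons/daemon/binaries/$VERSION/$OS/$TAR"
mkdir -p /opt/commons-daemon                    # placeholder install location
tar -xzf "downloads/$TAR" -C /opt/commons-daemon
chmod ugo+x /opt/commons-daemon/jsvc            # same permission fix the Ant target applied

With the binary in place, JSVC_HOME=/opt/commons-daemon matches the hadoop-env.sh example earlier.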
@@ -133,7 +133,14 @@ if [ "$starting_secure_dn" = "true" ]; then
     HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
   fi
 
-  exec "$HADOOP_HDFS_HOME/libexec/jsvc" \
+  JSVC=$JSVC_HOME/jsvc
+  if [ ! -f $JSVC ]; then
+    echo "JSVC_HOME is not set correctly so jsvc can not be found. Jsvc is required to run secure datanodes. "
+    echo "Please download and install jsvc from http://archive.apache.org/dist/commons/daemon/binaries/ "\
+      "and set JSVC_HOME to the directory containing the jsvc binary."
+    exit
+  fi
+  exec "$JSVC" \
           -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
           -errfile "$HADOOP_LOG_DIR/jsvc.err" \
          -pidfile "$HADOOP_SECURE_DN_PID" \
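After this change bin/hdfs resolves jsvc from $JSVC_HOME rather than from a bundled libexec/jsvc, and it exits with the download hint above when the binary is missing. A hedged sketch of the resulting way to start a secure datanode; the install path and the HADOOP_SECURE_DN_USER value are assumptions about a typical deployment of this era, not part of the diff:

# Run as root; jsvc binds the privileged ports, then drops to the datanode user.
export JSVC_HOME=/opt/commons-daemon     # directory containing the jsvc binary (example path)
export HADOOP_SECURE_DN_USER=hdfs        # secure datanode user (site-specific, assumed here)
bin/hdfs datanode                        # ends up exec'ing "$JSVC_HOME/jsvc" as shown above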