HDFS-2303. Unbundle jsvc. Contributed by Roman Shaposhnik and Mingjie Lai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1299963 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2012-03-13 03:38:57 +00:00
parent 1a75ec8288
commit ab20dae519
4 changed files with 14 additions and 44 deletions


@@ -25,6 +25,10 @@
 # The java implementation to use.
 export JAVA_HOME=${JAVA_HOME}
 
+# The jsvc implementation to use. Jsvc is required to run secure datanodes.
+#export JSVC_HOME=${JSVC_HOME}
+
 export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
 
 # Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
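With jsvc unbundled, a secure-datanode host now supplies its own jsvc binary and points JSVC_HOME at it. A minimal hadoop-env.sh sketch, assuming jsvc was unpacked under /usr/local/jsvc (the path is an illustrative assumption, not part of this commit):

# Uncomment and set JSVC_HOME so bin/hdfs can invoke $JSVC_HOME/jsvc.
# /usr/local/jsvc is only an assumed install location.
export JSVC_HOME=/usr/local/jsvc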


@@ -95,6 +95,8 @@ Release 0.23.3 - UNRELEASED
     HDFS-2676. Remove Avro RPC. (suresh)
 
+    HDFS-2303. Unbundle jsvc. (Roman Shaposhnik and Mingjie Lai via eli)
+
   NEW FEATURES
 
     HDFS-2978. The NameNode should expose name dir statuses via JMX. (atm)


@@ -329,49 +329,6 @@
             </tasks>
           </configuration>
         </execution>
-        <execution>
-          <id>xprepare-package-hadoop-daemon</id>
-          <phase>prepare-package</phase>
-          <goals>
-            <goal>run</goal>
-          </goals>
-          <configuration>
-            <target unless="windows.build">
-              <condition property="commons.daemon.os.name" value="darwin">
-                <os name="Mac OS X"/>
-              </condition>
-              <condition property="commons.daemon.os.arch" value="universal">
-                <os name="Mac OS X"/>
-              </condition>
-              <condition property="commons.daemon.os.name" value="linux">
-                <os name="Linux" />
-              </condition>
-              <!-- Set commons.daemon.os.arch to either i686 or x86_64 for GNU/Linux -->
-              <condition property="commons.daemon.os.arch" value="x86_64">
-                <os name="Linux" arch="amd64"/>
-              </condition>
-              <condition property="commons.daemon.os.arch" value="i686">
-                <os name="Linux" /> <!-- This is a guess -->
-              </condition>
-              <property name="commons.daemon.tar.name"
-                        value="commons-daemon-${commons-daemon.version}-bin-${commons.daemon.os.name}-${commons.daemon.os.arch}.tar.gz"/>
-              <mkdir dir="downloads"/>
-              <get src="http://archive.apache.org/dist/commons/daemon/binaries/${commons-daemon.version}/${commons.daemon.os.name}/${commons.daemon.tar.name}"
-                   dest="downloads/${commons.daemon.tar.name}" verbose="true" skipexisting="true"/>
-              <delete dir="${project.build.directory}/commons-daemon.staging"/>
-              <mkdir dir="${project.build.directory}/commons-daemon.staging"/>
-              <untar compression="gzip" src="${basedir}/downloads/${commons.daemon.tar.name}"
-                     dest="${project.build.directory}/commons-daemon.staging"/>
-              <copy file="${project.build.directory}/commons-daemon.staging/jsvc"
-                    todir="${project.build.directory}/${project.artifactId}-${project.version}/libexec"
-                    verbose="true"/>
-              <chmod perm="ugo+x" type="file">
-                <fileset file="${project.build.directory}/${project.artifactId}-${project.version}/libexec/jsvc"/>
-              </chmod>
-            </target>
-          </configuration>
-        </execution>
       </executions>
     </plugin>
     <plugin>
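Because the build no longer downloads and stages jsvc, operators have to install it by hand before starting a secure datanode. A hedged sketch of the manual equivalent of the removed Ant target, for a 64-bit Linux host; the commons-daemon version and the /usr/local/jsvc install directory below are assumptions, not values from this commit:

# Download a commons-daemon binary tarball (same URL layout the Ant <get> used),
# unpack it, and make the bundled jsvc binary executable.
DAEMON_VERSION=1.0.10   # assumed version; substitute the one your release calls for
TARBALL="commons-daemon-${DAEMON_VERSION}-bin-linux-x86_64.tar.gz"
wget "http://archive.apache.org/dist/commons/daemon/binaries/${DAEMON_VERSION}/linux/${TARBALL}"
mkdir -p /usr/local/jsvc
tar -xzf "${TARBALL}" -C /usr/local/jsvc   # the tarball carries jsvc at its top level
chmod ugo+x /usr/local/jsvc/jsvc
# Then export JSVC_HOME=/usr/local/jsvc in hadoop-env.sh so bin/hdfs can find it.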


@@ -133,7 +133,14 @@ if [ "$starting_secure_dn" = "true" ]; then
     HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
   fi
 
-  exec "$HADOOP_HDFS_HOME/libexec/jsvc" \
+  JSVC=$JSVC_HOME/jsvc
+  if [ ! -f $JSVC ]; then
+    echo "JSVC_HOME is not set correctly so jsvc can not be found. Jsvc is required to run secure datanodes. "
+    echo "Please download and install jsvc from http://archive.apache.org/dist/commons/daemon/binaries/ "\
+      "and set JSVC_HOME to the directory containing the jsvc binary."
+    exit
+  fi
+  exec "$JSVC" \
     -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
     -errfile "$HADOOP_LOG_DIR/jsvc.err" \
     -pidfile "$HADOOP_SECURE_DN_PID" \