MAPREDUCE-3366. Mapreduce component should use consistent directory structure layout as HDFS/common (Eric Yang via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1215065 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Mahadev Konar 2011-12-16 09:09:28 +00:00
parent c4ff892520
commit 6d551b83de
16 changed files with 130 additions and 114 deletions

View File

@ -22,7 +22,6 @@
<format>dir</format> <format>dir</format>
</formats> </formats>
<includeBaseDirectory>false</includeBaseDirectory> <includeBaseDirectory>false</includeBaseDirectory>
<!-- TODO: this layout is wrong. We need module specific bin files in module specific dirs -->
<fileSets> <fileSets>
<fileSet> <fileSet>
<directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/target/native/target/usr/local/bin</directory> <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/target/native/target/usr/local/bin</directory>
@ -33,7 +32,7 @@
<directory>hadoop-yarn/bin</directory> <directory>hadoop-yarn/bin</directory>
<outputDirectory>bin</outputDirectory> <outputDirectory>bin</outputDirectory>
<includes> <includes>
<include>*</include> <include>yarn</include>
</includes> </includes>
<fileMode>0755</fileMode> <fileMode>0755</fileMode>
</fileSet> </fileSet>
@ -41,17 +40,81 @@
<directory>bin</directory> <directory>bin</directory>
<outputDirectory>bin</outputDirectory> <outputDirectory>bin</outputDirectory>
<includes> <includes>
<include>*</include> <include>mapred</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>bin</directory>
<outputDirectory>libexec</outputDirectory>
<includes>
<include>mapred-config.sh</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>hadoop-yarn/bin</directory>
<outputDirectory>libexec</outputDirectory>
<includes>
<include>yarn-config.sh</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
<directory>hadoop-yarn/bin</directory>
<outputDirectory>sbin</outputDirectory>
<includes>
<include>yarn-daemon.sh</include>
<include>yarn-daemons.sh</include>
<include>start-yarn.sh</include>
<include>stop-yarn.sh</include>
</includes> </includes>
<fileMode>0755</fileMode> <fileMode>0755</fileMode>
</fileSet> </fileSet>
<fileSet> <fileSet>
<directory>hadoop-yarn/conf</directory> <directory>hadoop-yarn/conf</directory>
<outputDirectory>conf</outputDirectory> <outputDirectory>etc/hadoop</outputDirectory>
<includes> <includes>
<include>**/*</include> <include>**/*</include>
</includes> </includes>
</fileSet> </fileSet>
<fileSet>
<directory>${basedir}</directory>
<outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
<includes>
<include>*.txt</include>
</includes>
</fileSet>
<fileSet>
<directory>${project.build.directory}/webapps</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
</fileSet>
<fileSet>
<directory>${basedir}/src/main/conf</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/templates</outputDirectory>
<includes>
<include>*-site.xml</include>
</includes>
</fileSet>
<fileSet>
<directory>${basedir}/src/main/packages/templates/conf</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/templates/conf</outputDirectory>
<includes>
<include>*</include>
</includes>
</fileSet>
<fileSet>
<directory>${basedir}/dev-support/jdiff</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.build.directory}/site/jdiff/xml</directory>
<outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.build.directory}/site</directory>
<outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
</fileSet>
</fileSets> </fileSets>
<moduleSets> <moduleSets>
<moduleSet> <moduleSet>
@ -59,7 +122,7 @@
<exclude>org.apache.hadoop:hadoop-yarn-server-tests</exclude> <exclude>org.apache.hadoop:hadoop-yarn-server-tests</exclude>
</excludes> </excludes>
<binaries> <binaries>
<outputDirectory>modules</outputDirectory> <outputDirectory>share/hadoop/${hadoop.component}</outputDirectory>
<includeDependencies>false</includeDependencies> <includeDependencies>false</includeDependencies>
<unpack>false</unpack> <unpack>false</unpack>
</binaries> </binaries>
@ -68,7 +131,7 @@
<dependencySets> <dependencySets>
<dependencySet> <dependencySet>
<useProjectArtifact>false</useProjectArtifact> <useProjectArtifact>false</useProjectArtifact>
<outputDirectory>/lib</outputDirectory> <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
<!-- Exclude hadoop artifacts. They will be found via HADOOP* env --> <!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
<excludes> <excludes>
<exclude>org.apache.hadoop:hadoop-common</exclude> <exclude>org.apache.hadoop:hadoop-common</exclude>

View File

@ -231,6 +231,23 @@ fi
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs'/*' CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs'/*'
# put yarn in classpath if present
if [ "$YARN_HOME" = "" ]; then
if [ -d "${HADOOP_PREFIX}/share/hadoop/mapreduce" ]; then
YARN_HOME=$HADOOP_PREFIX
fi
fi
if [ -d "$YARN_HOME/share/hadoop/mapreduce/webapps" ]; then
CLASSPATH=${CLASSPATH}:$YARN_HOME/share/hadoop/mapreduce
fi
if [ -d "$YARN_HOME/share/hadoop/mapreduce/lib" ]; then
CLASSPATH=${CLASSPATH}:$YARN_HOME/share/hadoop/mapreduce/lib'/*'
fi
CLASSPATH=${CLASSPATH}:$YARN_HOME/share/hadoop/mapreduce'/*'
# cygwin path translation # cygwin path translation
if $cygwin; then if $cygwin; then
HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"` HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"`

View File

@ -76,6 +76,9 @@
<id>dist</id> <id>dist</id>
<activation> <activation>
<activeByDefault>false</activeByDefault> <activeByDefault>false</activeByDefault>
<property>
<name>tar|rpm|deb</name>
</property>
</activation> </activation>
<build> <build>
<plugins> <plugins>
@ -114,15 +117,6 @@
run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* . run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* . run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* . run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
COMMON_LIB=share/hadoop/common/lib
MODULES=../../../../modules
run ln -s $MODULES/hadoop-mapreduce-client-app-${project.version}.jar $COMMON_LIB
run ln -s $MODULES/hadoop-yarn-api-${project.version}.jar $COMMON_LIB
run ln -s $MODULES/hadoop-mapreduce-client-common-${project.version}.jar $COMMON_LIB
run ln -s $MODULES/hadoop-yarn-common-${project.version}.jar $COMMON_LIB
run ln -s $MODULES/hadoop-mapreduce-client-core-${project.version}.jar $COMMON_LIB
run ln -s $MODULES/hadoop-yarn-server-common-${project.version}.jar $COMMON_LIB
run ln -s $MODULES/hadoop-mapreduce-client-jobclient-${project.version}.jar $COMMON_LIB
echo echo
echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}" echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}"
echo echo

View File

@ -323,6 +323,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3422. Counter display names are not being picked up. (Jonathan MAPREDUCE-3422. Counter display names are not being picked up. (Jonathan
Eagles via sseth) Eagles via sseth)
MAPREDUCE-3366. Mapreduce component should use consistent directory structure
layout as HDFS/common (Eric Yang via mahadev)
Release 0.23.0 - 2011-11-01 Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES INCOMPATIBLE CHANGES

View File

@ -55,11 +55,11 @@ Step 8) Modify mapred-site.xml to use yarn framework
Step 9) cd $YARN_HOME Step 9) cd $YARN_HOME
Step 10) bin/yarn-daemon.sh start resourcemanager Step 10) sbin/yarn-daemon.sh start resourcemanager
Step 11) bin/yarn-daemon.sh start nodemanager Step 11) sbin/yarn-daemon.sh start nodemanager
Step 12) bin/yarn-daemon.sh start historyserver Step 12) sbin/yarn-daemon.sh start historyserver
Step 13) You are all set, an example on how to run a mapreduce job is: Step 13) You are all set, an example on how to run a mapreduce job is:
cd $HADOOP_MAPRED_HOME cd $HADOOP_MAPRED_HOME

View File

@ -38,7 +38,7 @@ fi
bin=`dirname "${BASH_SOURCE-$0}"` bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
DEFAULT_LIBEXEC_DIR="$bin" DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR} HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/yarn-config.sh . $HADOOP_LIBEXEC_DIR/yarn-config.sh

View File

@ -23,7 +23,7 @@ echo "starting yarn daemons"
bin=`dirname "${BASH_SOURCE-$0}"` bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
DEFAULT_LIBEXEC_DIR="$bin" DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR} HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/yarn-config.sh . $HADOOP_LIBEXEC_DIR/yarn-config.sh

View File

@ -23,7 +23,7 @@ echo "stopping yarn daemons"
bin=`dirname "${BASH_SOURCE-$0}"` bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
DEFAULT_LIBEXEC_DIR="$bin" DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR} HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/yarn-config.sh . $HADOOP_LIBEXEC_DIR/yarn-config.sh

View File

@ -44,7 +44,7 @@
bin=`dirname "${BASH_SOURCE-$0}"` bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
DEFAULT_LIBEXEC_DIR="$bin" DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR} HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/yarn-config.sh . $HADOOP_LIBEXEC_DIR/yarn-config.sh
@ -109,8 +109,7 @@ if [ ! -d "$HADOOP_CONF_DIR" ]; then
exit 1 exit 1
fi fi
CLASSPATH="${HADOOP_CONF_DIR}:${YARN_CONF_DIR}" CLASSPATH="${HADOOP_CONF_DIR}:${YARN_CONF_DIR}:${CLASSPATH}"
CLASSPATH=${CLASSPATH}:${YARN_CLASSPATH}
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
# for developers, add Hadoop classes to CLASSPATH # for developers, add Hadoop classes to CLASSPATH
@ -146,38 +145,6 @@ fi
# so that filenames w/ spaces are handled correctly in loops below # so that filenames w/ spaces are handled correctly in loops below
IFS= IFS=
# add hadoop-common libs to CLASSPATH
if [ ! -d "$HADOOP_COMMON_HOME" ]; then
if [ -d "$HADOOP_PREFIX" ]; then
export HADOOP_COMMON_HOME=$HADOOP_PREFIX
else
echo No HADOOP_COMMON_HOME set.
echo Please specify it either in yarn-env.sh or in the environment.
exit 1
fi
fi
CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/share/hadoop/common'/*'
CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/share/hadoop/common/lib'/*'
# add hadoop-hdfs libs to CLASSPATH
if [ ! -d "$HADOOP_HDFS_HOME" ]; then
if [ -d "$HADOOP_PREFIX" ]; then
export HADOOP_HDFS_HOME=$HADOOP_PREFIX
else
echo No HADOOP_HDFS_HOME set.
echo Please specify it either in yarn-env.sh or in the environment.
exit 1
fi
fi
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs'/*'
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib'/*'
# add yarn libs to CLASSPATH
CLASSPATH=${CLASSPATH}:$YARN_HOME/modules'/*'
CLASSPATH=${CLASSPATH}:$YARN_HOME/lib'/*'
# default log directory & file # default log directory & file
if [ "$YARN_LOG_DIR" = "" ]; then if [ "$YARN_LOG_DIR" = "" ]; then
YARN_LOG_DIR="$YARN_HOME/logs" YARN_LOG_DIR="$YARN_HOME/logs"

View File

@ -15,29 +15,24 @@
# included in all the hadoop scripts with source command # included in all the hadoop scripts with source command
# should not be executable directly # should not be executable directly
# also should not be passed any arguments, since we need original $* bin=`which "$0"`
bin=`dirname "${bin}"`
# resolve links - $0 may be a softlink
this="$0"
while [ -h "$this" ]; do
ls=`ls -ld "$this"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '.*/.*' > /dev/null; then
this="$link"
else
this=`dirname "$this"`/"$link"
fi
done
# convert relative path to absolute path
bin=`dirname "$this"`
script=`basename "$this"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
this="$bin/$script"
# the root of the Hadoop installation export HADOOP_PREFIX="${HADOOP_PREFIX:-$bin/..}"
export YARN_HOME=`dirname "$this"`/..
DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
if [ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]; then
. ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh
elif [ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]; then
. "$HADOOP_COMMON_HOME"/libexec/hadoop-config.sh
elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
. "$HADOOP_HOME"/libexec/hadoop-config.sh
else
echo "Hadoop common not found."
exit
fi
# Same glibc bug that discovered in Hadoop. # Same glibc bug that discovered in Hadoop.
# Without this you can see very large vmem settings on containers. # Without this you can see very large vmem settings on containers.
@ -56,7 +51,7 @@ then
fi fi
# Allow alternate conf dir location. # Allow alternate conf dir location.
YARN_CONF_DIR="${YARN_CONF_DIR:-$YARN_HOME/conf}" YARN_CONF_DIR="${HADOOP_CONF_DIR:-$YARN_HOME/conf}"
#check to see it is specified whether to use the slaves or the #check to see it is specified whether to use the slaves or the
# masters file # masters file

View File

@ -39,7 +39,7 @@ fi
bin=`dirname "${BASH_SOURCE-$0}"` bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
DEFAULT_LIBEXEC_DIR="$bin" DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR} HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/yarn-config.sh . $HADOOP_LIBEXEC_DIR/yarn-config.sh

View File

@ -30,7 +30,7 @@ fi
bin=`dirname "${BASH_SOURCE-$0}"` bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd` bin=`cd "$bin"; pwd`
DEFAULT_LIBEXEC_DIR="$bin" DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR} HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/yarn-config.sh . $HADOOP_LIBEXEC_DIR/yarn-config.sh

View File

@ -41,10 +41,6 @@ if [ "$YARN_HEAPSIZE" != "" ]; then
#echo $JAVA_HEAP_MAX #echo $JAVA_HEAP_MAX
fi fi
# CLASSPATH initially contains $YARN_CONF_DIR
CLASSPATH="${YARN_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
# so that filenames w/ spaces are handled correctly in loops below # so that filenames w/ spaces are handled correctly in loops below
IFS= IFS=

View File

@ -95,8 +95,8 @@ public interface ApplicationConstants {
"$HADOOP_COMMON_HOME/share/hadoop/common/lib/*", "$HADOOP_COMMON_HOME/share/hadoop/common/lib/*",
"$HADOOP_HDFS_HOME/share/hadoop/hdfs/*", "$HADOOP_HDFS_HOME/share/hadoop/hdfs/*",
"$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*", "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*",
"$YARN_HOME/modules/*", "$YARN_HOME/share/hadoop/mapreduce/*",
"$YARN_HOME/lib/*" "$YARN_HOME/share/hadoop/mapreduce/lib/*"
}; };
/** /**

View File

@ -171,20 +171,6 @@ Add the following configs to your <<<yarn-site.xml>>>
</property> </property>
+---+ +---+
* Create Symlinks.
You will have to create the following symlinks:
+---+
$ cd $HADOOP_COMMON_HOME/share/hadoop/common/lib/
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-mapreduce-client-app-*-SNAPSHOT.jar .
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-mapreduce-client-jobclient-*-SNAPSHOT.jar .
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-mapreduce-client-common-*-SNAPSHOT.jar .
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-mapreduce-client-shuffle-*-SNAPSHOT.jar .
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-mapreduce-client-core-*-SNAPSHOT.jar .
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-yarn-common-*-SNAPSHOT.jar .
$ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-yarn-api-*-SNAPSHOT.jar .
+---+
* Running daemons. * Running daemons.
Assuming that the environment variables <<$HADOOP_COMMON_HOME>>, <<$HADOOP_HDFS_HOME>>, <<$HADOOP_MAPRED_HOME>>, Assuming that the environment variables <<$HADOOP_COMMON_HOME>>, <<$HADOOP_HDFS_HOME>>, <<$HADOOP_MAPRED_HOME>>,
@ -195,8 +181,8 @@ $ ln -s $HADOOP_MAPRED_HOME/modules/hadoop-yarn-api-*-SNAPSHOT.jar .
+---+ +---+
$ cd $HADOOP_MAPRED_HOME $ cd $HADOOP_MAPRED_HOME
$ bin/yarn-daemon.sh start resourcemanager $ sbin/yarn-daemon.sh start resourcemanager
$ bin/yarn-daemon.sh start nodemanager $ sbin/yarn-daemon.sh start nodemanager
+---+ +---+
You should be up and running. You can run randomwriter as: You should be up and running. You can run randomwriter as:

View File

@ -34,6 +34,8 @@
<test.timeout>600000</test.timeout> <test.timeout>600000</test.timeout>
<fork.mode>once</fork.mode> <fork.mode>once</fork.mode>
<mr.basedir>${basedir}</mr.basedir> <mr.basedir>${basedir}</mr.basedir>
<hadoop.component>mapreduce</hadoop.component>
<is.hadoop.component>true</is.hadoop.component>
</properties> </properties>
<modules> <modules>
@ -321,7 +323,10 @@
<profiles> <profiles>
<profile> <profile>
<id>release</id> <id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build> <build>
<plugins> <plugins>
<plugin> <plugin>
@ -336,16 +341,6 @@
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
</plugins>
</build>
</profile>
<profile>
<id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId> <artifactId>maven-assembly-plugin</artifactId>
@ -367,7 +362,7 @@
</configuration> </configuration>
<executions> <executions>
<execution> <execution>
<id>dist</id> <id>package-mapreduce</id>
<phase>prepare-package</phase> <phase>prepare-package</phase>
<goals> <goals>
<goal>single</goal> <goal>single</goal>