HADOOP-12850. pull shell code out of hadoop-dist

Signed-off-by: Steve Loughran <stevel@apache.org>

Author: Allen Wittenauer
Date:   2016-02-28 16:51:51 -08:00
Commit: 1cb2f93451 (parent c58a6d53c5)
3 changed files with 202 additions and 130 deletions
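
The two new stitching scripts take the Maven version and the build directory as positional arguments, matching the <argument> entries that exec-maven-plugin now passes from hadoop-dist/pom.xml. As a minimal sketch, they could be driven by hand roughly as follows (the version string and paths are illustrative; Maven normally supplies ${project.version} and ${project.build.directory}):

# Sketch only: manual invocation from the hadoop-dist build directory.
# Version and paths are illustrative, not taken from the commit.
cd hadoop-dist/target
../../dev-support/bin/dist-layout-stitching 3.0.0-SNAPSHOT "$(pwd)"
../../dev-support/bin/dist-tar-stitching 3.0.0-SNAPSHOT "$(pwd)"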

dev-support/bin/dist-layout-stitching (new file)

@@ -0,0 +1,140 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# project.version
VERSION=$1
# project.build.directory
BASEDIR=$2
function run()
{
  declare res

  echo "\$ ${*}"
  "${@}"
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}

function findfileindir()
{
  declare file="$1"
  declare dir="${2:-./share}"
  declare count

  count=$(find "${dir}" -iname "${file}" | wc -l)
  #shellcheck disable=SC2086
  echo ${count}
}

function copyifnotexists()
{
  declare src="$1"
  declare dest="$2"
  declare srcname
  declare destdir
  declare child
  declare childpath

  if [[ -f "${src}" ]]; then
    srcname=${src##*/}
    if [[ "${srcname}" != *.jar ||
          $(findfileindir "${srcname}") -eq "0" ]]; then
      destdir=$(dirname "${dest}")
      mkdir -p "${destdir}"
      cp -p "${src}" "${dest}"
    fi
  else
    for childpath in "${src}"/*; do
      child="${childpath##*/}"
      if [[ "${child}" == "doc" ||
            "${child}" == "webapps" ]]; then
        mkdir -p "${dest}/${child}"
        cp -r "${src}/${child}"/* "${dest}/${child}"
        continue;
      fi
      copyifnotexists "${src}/${child}" "${dest}/${child}"
    done
  fi
}
# Copy all contents as-is, except for the libs.
# For libs, copy only when the file does not already exist in the 'share' directory.
function copy()
{
  declare src="$1"
  declare dest="$2"
  declare child
  declare childpath

  if [[ -d "${src}" ]]; then
    for childpath in "${src}"/*; do
      child="${childpath##*/}"
      if [[ "${child}" == "share" ]]; then
        copyifnotexists "${src}/${child}" "${dest}/${child}"
      else
        if [[ -d "${src}/${child}" ]]; then
          mkdir -p "${dest}/${child}"
          cp -pr "${src}/${child}"/* "${dest}/${child}"
        else
          cp -pr "${src}/${child}" "${dest}/${child}"
        fi
      fi
    done
  fi
}
# shellcheck disable=SC2164
ROOT=$(cd "${BASEDIR}"/../..;pwd)
echo
echo "Current directory $(pwd)"
echo
run rm -rf "hadoop-${VERSION}"
run mkdir "hadoop-${VERSION}"
run cd "hadoop-${VERSION}"
run cp -p "${ROOT}/LICENSE.txt" .
run cp -p "${ROOT}/NOTICE.txt" .
run cp -p "${ROOT}/README.txt" .
# Copy hadoop-common first so that it always has all of its dependencies.
# Remaining projects copy only libraries that are not already present in the 'share' directory.
run copy "${ROOT}/hadoop-common-project/hadoop-common/target/hadoop-common-${VERSION}" .
run copy "${ROOT}/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${VERSION}" .
run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${VERSION}" .
run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${VERSION}" .
run copy "${ROOT}/hadoop-yarn-project/target/hadoop-yarn-project-${VERSION}" .
run copy "${ROOT}/hadoop-mapreduce-project/target/hadoop-mapreduce-${VERSION}" .
run copy "${ROOT}/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${VERSION}" .
#copy httpfs and kms as is
run cp -pr "${ROOT}/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${VERSION}"/* .
run cp -pr "${ROOT}/hadoop-common-project/hadoop-kms/target/hadoop-kms-${VERSION}"/* .
echo
echo "Hadoop dist layout available at: ${BASEDIR}/hadoop-${VERSION}"
echo
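
The key rule in this script is the jar de-duplication in copyifnotexists: a jar coming from a later module is copied only if no file with the same name already sits under ./share. A standalone sketch of that check (not part of the commit; directory and jar names are invented for illustration):

# Standalone sketch of the copyifnotexists de-duplication rule.
# All names below are invented for illustration.
mkdir -p demo/share/hadoop/common/lib demo/incoming
touch demo/share/hadoop/common/lib/guava-11.0.2.jar
touch demo/incoming/guava-11.0.2.jar demo/incoming/only-new.jar
cd demo
for jar in incoming/*.jar; do
  name=${jar##*/}
  if [[ $(find ./share -iname "${name}" | wc -l) -eq 0 ]]; then
    echo "would copy ${name} (not yet under share/)"
  else
    echo "would skip ${name} (already under share/)"
  fi
done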

dev-support/bin/dist-tar-stitching (new file)

@@ -0,0 +1,44 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# project.version
VERSION=$1
# project.build.directory
BASEDIR=$2
function run()
{
  declare res

  echo "\$ ${*}"
  "${@}"
  res=$?
  if [[ ${res} != 0 ]]; then
    echo
    echo "Failed!"
    echo
    exit "${res}"
  fi
}
run tar cf "hadoop-${VERSION}.tar" "hadoop-${VERSION}"
run gzip -f "hadoop-${VERSION}.tar"
echo
echo "Hadoop dist tar available at: ${BASEDIR}/hadoop-${VERSION}.tar.gz"
echo
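
A quick, illustrative way to sanity-check the produced artifact (not part of the commit; the version string is an example):

# Illustrative only: list the first entries of the tarball; they should all
# sit under the hadoop-${VERSION}/ prefix.
VERSION=3.0.0-SNAPSHOT
tar tzf "hadoop-${VERSION}.tar.gz" | head -n 5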

hadoop-dist/pom.xml

@@ -83,151 +83,39 @@
       <build>
         <plugins>
           <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>exec-maven-plugin</artifactId>
             <executions>
               <execution>
                 <id>dist</id>
                 <phase>prepare-package</phase>
                 <goals>
-                  <goal>run</goal>
+                  <goal>exec</goal>
                 </goals>
                 <configuration>
-                  <target>
-                    <echo file="${project.build.directory}/dist-layout-stitching.sh">
-                      run() {
-                        echo "\$ ${@}"
-                        "${@}"
-                        res=$?
-                        if [ $res != 0 ]; then
-                          echo
-                          echo "Failed!"
-                          echo
-                          exit $res
-                        fi
-                      }
-                      findFileInDir(){
-                        local file="$1";
-                        local dir="${2:-./share}";
-                        local count=$(find "$dir" -iname "$file"|wc -l)
-                        echo "$count";
-                      }
-                      copyIfNotExists(){
-                        local src="$1"
-                        local srcName=$(basename "$src")
-                        local dest="$2";
-                        if [ -f "$src" ]; then
-                          if [[ "$srcName" != *.jar ]] || [ $(findFileInDir "$srcName") -eq "0" ]; then
-                            local destDir=$(dirname "$dest")
-                            mkdir -p "$destDir"
-                            cp "$src" "$dest"
-                          fi
-                        else
-                          for childPath in "$src"/* ;
-                          do
-                            child=$(basename "$childPath");
-                            if [ "$child" == "doc" ] || [ "$child" == "webapps" ]; then
-                              mkdir -p "$dest"/"$child"
-                              cp -r "$src"/"$child"/* "$dest"/"$child"
-                              continue;
-                            fi
-                            copyIfNotExists "$src"/"$child" "$dest"/"$child"
-                          done
-                        fi
-                      }
-                      #Copy all contents as is except the lib.
-                      #for libs check for existence in share directory, if not exist then only copy.
-                      copy(){
-                        local src="$1";
-                        local dest="$2";
-                        if [ -d "$src" ]; then
-                          for childPath in "$src"/* ;
-                          do
-                            child=$(basename "$childPath");
-                            if [ "$child" == "share" ]; then
-                              copyIfNotExists "$src"/"$child" "$dest"/"$child"
-                            else
-                              if [ -d "$src"/"$child" ]; then
-                                mkdir -p "$dest"/"$child"
-                                cp -r "$src"/"$child"/* "$dest"/"$child"
-                              else
-                                cp -r "$src"/"$child" "$dest"/"$child"
-                              fi
-                            fi
-                          done
-                        fi
-                      }
-                      # Shellcheck SC2086
-                      ROOT=$(cd "${project.build.directory}"/../..;pwd)
-                      echo
-                      echo "Current directory $(pwd)"
-                      echo
-                      run rm -rf hadoop-${project.version}
-                      run mkdir hadoop-${project.version}
-                      run cd hadoop-${project.version}
-                      run cp "$ROOT"/LICENSE.txt .
-                      run cp "$ROOT"/NOTICE.txt .
-                      run cp "$ROOT"/README.txt .
-                      # Copy hadoop-common first so that it have always have all dependencies.
-                      # Remaining projects will copy only libraries which are not present already in 'share' directory.
-                      run copy "$ROOT"/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version} .
-                      run copy "$ROOT"/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version} .
-                      run copy "$ROOT"/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version} .
-                      run copy "$ROOT"/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version} .
-                      run copy "$ROOT"/hadoop-yarn-project/target/hadoop-yarn-project-${project.version} .
-                      run copy "$ROOT"/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version} .
-                      run copy "$ROOT"/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version} .
-                      #copy httpfs and kms as is
-                      run cp -r "$ROOT"/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
-                      run cp -r "$ROOT"/hadoop-common-project/hadoop-kms/target/hadoop-kms-${project.version}/* .
-                      echo
-                      echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}"
-                      echo
-                    </echo>
-                    <exec executable="${shell-executable}" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./dist-layout-stitching.sh"/>
-                    </exec>
-                  </target>
+                  <executable>${basedir}/../dev-support/bin/dist-layout-stitching</executable>
+                  <workingDirectory>${project.build.directory}</workingDirectory>
+                  <requiresOnline>false</requiresOnline>
+                  <arguments>
+                    <argument>${project.version}</argument>
+                    <argument>${project.build.directory}</argument>
+                  </arguments>
                 </configuration>
               </execution>
               <execution>
                 <id>tar</id>
                 <phase>package</phase>
                 <goals>
-                  <goal>run</goal>
+                  <goal>exec</goal>
                 </goals>
                 <configuration>
-                  <target if="tar">
-                    <echo file="${project.build.directory}/dist-tar-stitching.sh">
-                      run() {
-                        echo "\$ ${@}"
-                        "${@}"
-                        res=$?
-                        if [ $res != 0 ]; then
-                          echo
-                          echo "Failed!"
-                          echo
-                          exit $res
-                        fi
-                      }
-                      run tar cf hadoop-${project.version}.tar hadoop-${project.version}
-                      run gzip -f hadoop-${project.version}.tar
-                      echo
-                      echo "Hadoop dist tar available at: ${project.build.directory}/hadoop-${project.version}.tar.gz"
-                      echo
-                    </echo>
-                    <exec executable="${shell-executable}" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./dist-tar-stitching.sh"/>
-                    </exec>
-                  </target>
+                  <executable>${basedir}/../dev-support/bin/dist-tar-stitching</executable>
+                  <workingDirectory>${project.build.directory}</workingDirectory>
+                  <requiresOnline>false</requiresOnline>
+                  <arguments>
+                    <argument>${project.version}</argument>
+                    <argument>${project.build.directory}</argument>
+                  </arguments>
                 </configuration>
               </execution>
             </executions>
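
With the pom change above, the layout and tar steps are ordinary exec-maven-plugin executions bound to the prepare-package and package phases. An illustrative build invocation from the source root (flags follow the conventional Hadoop dist build and may vary by branch; -Dtar is how the tarball has conventionally been requested, and the old antrun tar target was guarded by if="tar"):

# Illustrative only: build the distribution; exact flags may differ per branch.
mvn clean package -Pdist -DskipTests -Dtar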