HDDS-40. Separating packaging of Ozone/HDDS from the main Hadoop.

Contributed by Elek, Marton.
Anu Engineer 2018-05-11 13:52:05 -07:00
parent 50408cfc69
commit 4b4f24ad5f
15 changed files with 394 additions and 115 deletions

.gitignore
View File

@@ -48,3 +48,8 @@ patchprocess/
.history/
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/package-lock.json
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-ui/src/main/webapp/yarn-error.log
#robotframework outputs
log.html
output.xml
report.html

View File

@@ -146,21 +146,6 @@ run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-api/target/hadoop-client-
run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-runtime/target/hadoop-client-runtime-${VERSION}.jar" share/hadoop/client/
run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-minicluster/target/hadoop-client-minicluster-${VERSION}.jar" share/hadoop/client/
# HDDS
run copy "${ROOT}/hadoop-hdds/common/target/hadoop-hdds-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/framework/target/hadoop-hdds-server-framework-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/server-scm/target/hadoop-hdds-server-scm-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/container-service/target/hadoop-hdds-container-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/client/target/hadoop-hdds-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/tools/target/hadoop-hdds-tools-${HDDS_VERSION}" .
# Ozone
run copy "${ROOT}/hadoop-ozone/common/target/hadoop-ozone-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/ozone-manager/target/hadoop-ozone-ozone-manager-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/client/target/hadoop-ozone-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/tools/target/hadoop-ozone-tools-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${VERSION}" .

View File

@@ -0,0 +1,153 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# project.version
VERSION=$1
# project.build.directory
BASEDIR=$2
#hdds.version
HDDS_VERSION=$3
function run()
{
declare res
echo "\$ ${*}"
"${@}"
res=$?
if [[ ${res} != 0 ]]; then
echo
echo "Failed!"
echo
exit "${res}"
fi
}
function findfileindir()
{
declare file="$1"
declare dir="${2:-./share}"
declare count
count=$(find "${dir}" -iname "${file}" | wc -l)
#shellcheck disable=SC2086
echo ${count}
}
function copyifnotexists()
{
declare src="$1"
declare dest="$2"
declare srcname
declare destdir
declare child
declare childpath
if [[ -f "${src}" ]]; then
srcname=${src##*/}
if [[ "${srcname}" != *.jar ||
$(findfileindir "${srcname}") -eq "0" ]]; then
destdir=$(dirname "${dest}")
mkdir -p "${destdir}"
cp -p "${src}" "${dest}"
fi
else
for childpath in "${src}"/*; do
child="${childpath##*/}"
if [[ "${child}" == "doc" ||
"${child}" == "webapps" ]]; then
mkdir -p "${dest}/${child}"
cp -r "${src}/${child}"/* "${dest}/${child}"
continue;
fi
copyifnotexists "${src}/${child}" "${dest}/${child}"
done
fi
}
#Copy all contents as-is, except for the libraries:
#for jars, check for existence in the share directory and copy them only if they are not already present.
function copy()
{
declare src="$1"
declare dest="$2"
declare child
declare childpath
if [[ -d "${src}" ]]; then
for childpath in "${src}"/*; do
child="${childpath##*/}"
if [[ "${child}" == "share" ]]; then
copyifnotexists "${src}/${child}" "${dest}/${child}"
else
if [[ -d "${src}/${child}" ]]; then
mkdir -p "${dest}/${child}"
cp -pr "${src}/${child}"/* "${dest}/${child}"
else
cp -pr "${src}/${child}" "${dest}/${child}"
fi
fi
done
fi
}
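# Illustration only (comments, nothing executed here): once hadoop-common has been copied,
# its jars already sit under ./share, so a later call such as
#   copyifnotexists ".../hadoop-hdds-common-${HDDS_VERSION}/share/hadoop/lib/example-dependency.jar" "./share/hadoop/lib/example-dependency.jar"
# is skipped because findfileindir reports an existing copy, while non-jar files and the
# doc/webapps directories are always copied.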
# shellcheck disable=SC2164
ROOT=$(cd "${BASEDIR}"/../..;pwd)
echo
echo "Current directory $(pwd)"
echo
run rm -rf "ozone"
run mkdir "ozone"
run cd "ozone"
run cp -p "${ROOT}/LICENSE.txt" .
run cp -p "${ROOT}/NOTICE.txt" .
run cp -p "${ROOT}/README.txt" .
# Copy hadoop-common first so that it always has all of its dependencies.
# The remaining projects copy only libraries that are not already present in the 'share' directory.
run copy "${ROOT}/hadoop-common-project/hadoop-common/target/hadoop-common-${VERSION}" .
run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${VERSION}" .
run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs-client/target/hadoop-hdfs-client-${VERSION}" .
# HDDS
run copy "${ROOT}/hadoop-hdds/common/target/hadoop-hdds-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/framework/target/hadoop-hdds-server-framework-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/server-scm/target/hadoop-hdds-server-scm-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/container-service/target/hadoop-hdds-container-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/client/target/hadoop-hdds-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-hdds/tools/target/hadoop-hdds-tools-${HDDS_VERSION}" .
# Ozone
run copy "${ROOT}/hadoop-ozone/common/target/hadoop-ozone-common-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/ozone-manager/target/hadoop-ozone-ozone-manager-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/client/target/hadoop-ozone-client-${HDDS_VERSION}" .
run copy "${ROOT}/hadoop-ozone/tools/target/hadoop-ozone-tools-${HDDS_VERSION}" .
mkdir -p ./share/hadoop/mapreduce
mkdir -p ./share/hadoop/yarn
echo
echo "Hadoop Ozone dist layout available at: ${BASEDIR}/ozone-${HDDS_VERSION}"
echo
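For reference, a minimal sketch of running the stitching script by hand, mirroring the argument order configured for the exec-maven-plugin further below (both version strings are placeholders, not values taken from this change):
```
# from the project root; arguments are project.version, project.build.directory, hdds.version
cd hadoop-dist/target
../../dev-support/bin/ozone-dist-layout-stitching 3.2.0-SNAPSHOT "$(pwd)" 0.2.1-SNAPSHOT
# the stitched layout is created in ./ozone
```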

View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# project.version
VERSION=$1
# project.build.directory
BASEDIR=$2
function run()
{
declare res
echo "\$ ${*}"
"${@}"
res=$?
if [[ ${res} != 0 ]]; then
echo
echo "Failed!"
echo
exit "${res}"
fi
}
#To make the final dist directory easily mountable from docker we don't use
#the version name in the directory name.
#To include the version name in the root directory of the tar file,
#we create a symbolic link and dereference it during the tar creation.
ln -s -f ozone "ozone-${VERSION}"
run tar -c --dereference -f "ozone-${VERSION}.tar" "ozone-${VERSION}"
run gzip -f "ozone-${VERSION}.tar"
echo
echo "Ozone dist tar available at: ${BASEDIR}/ozone-${VERSION}.tar.gz"
echo
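A quick way to sanity-check the resulting artifact (the version string is a placeholder): because the symlink is dereferenced, the archive root directory carries the version while the on-disk layout directory stays plain `ozone`:
```
tar -tzf ozone-0.2.1-SNAPSHOT.tar.gz | head -3
```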

View File

@@ -13,8 +13,8 @@
limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
@@ -168,10 +168,13 @@
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}</workingDirectory>
<workingDirectory>${project.build.directory}
</workingDirectory>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>${basedir}/../dev-support/bin/dist-layout-stitching</argument>
<argument>
${basedir}/../dev-support/bin/dist-layout-stitching
</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
<argument>${hdds.version}</argument>
@@ -182,14 +185,16 @@
<id>toolshooks</id>
<phase>prepare-package</phase>
<goals>
<goal>exec</goal>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${basedir}</workingDirectory>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>${basedir}/../dev-support/bin/dist-tools-hooks-maker</argument>
<argument>
${basedir}/../dev-support/bin/dist-tools-hooks-maker
</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
<argument>${basedir}/../hadoop-tools</argument>
@@ -203,14 +208,16 @@
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}</workingDirectory>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>${basedir}/../dev-support/bin/dist-tar-stitching</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
</arguments>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}
</workingDirectory>
<requiresOnline>false</requiresOnline>
<arguments>
<argument>${basedir}/../dev-support/bin/dist-tar-stitching
</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
</arguments>
</configuration>
</execution>
</executions>
@@ -218,14 +225,12 @@
</plugins>
</build>
</profile>
<profile>
<id>hdds</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-ozone-manager</artifactId>
@@ -261,41 +266,86 @@
<plugins>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>copy-docker-compose</id>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>prepare-package</phase>
<configuration>
<outputDirectory>${project.build.directory}/compose</outputDirectory>
<resources>
<resource>
<directory>src/main/compose</directory>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
<execution>
<id>copy-dockerfile</id>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>prepare-package</phase>
<configuration>
<outputDirectory>${project.build.directory}</outputDirectory>
<resources>
<resource>
<directory>src/main/docker</directory>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<executions>
<execution>
<id>copy-docker-compose</id>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>prepare-package</phase>
<configuration>
<outputDirectory>${project.build.directory}/compose
</outputDirectory>
<resources>
<resource>
<directory>src/main/compose</directory>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
<execution>
<id>copy-dockerfile</id>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>prepare-package</phase>
<configuration>
<outputDirectory>${project.build.directory}</outputDirectory>
<resources>
<resource>
<directory>src/main/docker</directory>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>dist-ozone</id>
<phase>prepare-package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}
</workingDirectory>
<arguments>
<argument>
${basedir}/../dev-support/bin/ozone-dist-layout-stitching
</argument>
<argument>${project.version}</argument>
<argument>${project.build.directory}</argument>
<argument>${hdds.version}</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>tar-ozone</id>
<phase>package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${shell-executable}</executable>
<workingDirectory>${project.build.directory}
</workingDirectory>
<arguments>
<argument>${basedir}/../dev-support/bin/ozone-dist-tar-stitching
</argument>
<argument>${hdds.version}</argument>
<argument>${project.build.directory}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>

View File

@@ -14,4 +14,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION=${project.version}
HDDS_VERSION=${hdds.version}

View File

@@ -20,7 +20,7 @@ services:
image: apache/hadoop-runner
hostname: namenode
volumes:
- ../..//hadoop-${VERSION}:/opt/hadoop
- ../../ozone:/opt/hadoop
ports:
- 9870:9870
environment:
@@ -31,7 +31,7 @@
datanode:
image: apache/hadoop-runner
volumes:
- ../..//hadoop-${VERSION}:/opt/hadoop
- ../../ozone:/opt/hadoop
ports:
- 9864
command: ["/opt/hadoop/bin/ozone","datanode"]
@@ -40,7 +40,7 @@
ksm:
image: apache/hadoop-runner
volumes:
- ../..//hadoop-${VERSION}:/opt/hadoop
- ../../ozone:/opt/hadoop
ports:
- 9874:9874
environment:
@@ -51,7 +51,7 @@
scm:
image: apache/hadoop-runner
volumes:
- ../..//hadoop-${VERSION}:/opt/hadoop
- ../../ozone:/opt/hadoop
ports:
- 9876:9876
env_file:
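With the compose definition now mounting the version-less `../../ozone` directory, a local cluster can be started directly from the compose files copied into the build output; a rough sketch, assuming the compose subdirectory name (which is not shown in this change):
```
cd hadoop-dist/target/compose/ozone   # subdirectory name is an assumption
docker-compose up -d
docker-compose ps
docker-compose down
```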

View File

@@ -112,7 +112,7 @@ public final class NameNodeUtils {
}
if (port > 0) {
return currentNnAddress;
return currentNnAddress;
} else {
// the port is missing or 0. Figure out real bind address later.
return null;

View File

@@ -20,19 +20,29 @@ This project contains acceptance tests for ozone/hdds using docker-compose and [
To run the acceptance tests, please activate the `ozone-acceptance-test` profile and do a full build.
Typically you need a `mvn install -Phdds,ozone-acceptance-test,dist -DskipTests` for a build without unit tests but with the acceptance tests.
```
mvn clean install -Pdist -Phdds
cd hadoop-ozone/acceptance-test
mvn integration-test -Phdds,ozone-acceptance-test,dist -DskipTests
```
Notes:
1. You need a Hadoop build in the hadoop-dist/target directory.
2. The `ozone-acceptance-test` profile can be activated even if the unit tests are disabled.
3. This method does not require the robot framework to be on the PATH, as Jython is used.
## Development
You can run manually the robot tests with `robot` cli. (See robotframework docs to install it.)
You can also run the robot tests manually with the `robot` cli.
(See the robotframework docs for installation instructions: http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#installation-instructions)
1. Go to the `src/test/robotframework`
2. Execute `robot -v basedir:${PWD}/../../.. -v VERSION:3.2.0-SNAPSHOT .`
The dev-support directory contains two wrapper scripts to run the robot framework with a local robot cli
instead of calling it from Maven.
You can also select just one test with `-t "*testnamefragment*"`.
This is useful during development of the robot files, as any robotframework cli
argument can be used.
1. `dev-support/bin/robot.sh` is the simple wrapper; the .robot file should be passed as an argument.
2. `dev-support/bin/robot-all.sh` calls robot.sh with the main acceptance test directory,
which means all the acceptance tests are executed (see the example invocations below).
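As an illustration (the `-t` pattern below is a placeholder; robot.sh simply forwards its arguments to the robot cli, so a directory or a single .robot file can be passed):
```
cd hadoop-ozone/acceptance-test
# run every acceptance test (requires a prior build with -Phdds and -Pdist):
./dev-support/bin/robot-all.sh
# run only the tests matching a name fragment:
./dev-support/bin/robot.sh -t "*volume*" src/test/robotframework/acceptance
```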

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
"$DIR/robot.sh" "$DIR/../../src/test/robotframework/acceptance"

View File

@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#basedir is the directory of the whole hadoop project. Used to calculate the
#exact path to the hadoop-dist project
BASEDIR=${DIR}/../../../..
if [ ! "$(which robot)" ] ; then
echo ""
echo "robot is not on your PATH."
echo ""
echo "Please install it according to the documentation:"
echo " http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#installation-instructions"
echo " (TLDR; most of the time you need: 'pip install robotframework')"
exit 1
fi
OZONEDISTDIR="$BASEDIR/hadoop-dist/target/ozone"
if [ ! -d "$OZONEDISTDIR" ]; then
echo "Ozone can't be found in the $OZONEDISTDIR."
echo "You may need a full build with -Phdds and -Pdist profiles"
exit -1
fi
robot -v basedir:"$BASEDIR" "$@"

View File

@@ -28,32 +28,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<description>Apache Hadoop Ozone Acceptance Tests</description>
<name>Apache Hadoop Ozone Acceptance Tests</name>
<packaging>pom</packaging>
<build>
<plugins>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>copy-docker-compose</id>
<goals>
<goal>copy-resources</goal>
</goals>
<phase>process-test-resources</phase>
<configuration>
<outputDirectory>${project.build.directory}/compose
</outputDirectory>
<resources>
<resource>
<directory>src/test/compose</directory>
<filtering>true</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>ozone-acceptance-test</id>
@@ -70,8 +44,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</goals>
<configuration>
<variables>
<variable>version:${project.version}</variable>
<variable>basedir:${project.basedir}</variable>
<variable>basedir:${project.basedir}/../..</variable>
</variables>
<skip>false</skip>
<skipTests>false</skipTests>

View File

@@ -14,4 +14,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
HADOOPDIR=../../hadoop-dist/target/hadoop-${project.version}
OZONEDIR=../../../hadoop-dist/target/ozone

View File

@@ -20,7 +20,7 @@ services:
image: apache/hadoop-runner
hostname: namenode
volumes:
- ${HADOOPDIR}:/opt/hadoop
- ${OZONEDIR}:/opt/hadoop
ports:
- 9870
environment:
@@ -31,7 +31,7 @@
datanode:
image: apache/hadoop-runner
volumes:
- ${HADOOPDIR}:/opt/hadoop
- ${OZONEDIR}:/opt/hadoop
ports:
- 9864
command: ["/opt/hadoop/bin/ozone","datanode"]
@@ -41,7 +41,7 @@
image: apache/hadoop-runner
hostname: ksm
volumes:
- ${HADOOPDIR}:/opt/hadoop
- ${OZONEDIR}:/opt/hadoop
ports:
- 9874
environment:
@@ -52,7 +52,7 @@
scm:
image: apache/hadoop-runner
volumes:
- ${HADOOPDIR}:/opt/hadoop
- ${OZONEDIR}:/opt/hadoop
ports:
- 9876
env_file:

View File

@@ -21,8 +21,7 @@ Suite Teardown Teardown Ozone Cluster
*** Variables ***
${COMMON_REST_HEADER} -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE root"
${version}
${basedir}
*** Test Cases ***
Daemons are running without error
@@ -130,8 +129,8 @@ Execute on
Run docker compose
[arguments] ${command}
Set Environment Variable HADOOPDIR ${basedir}/../../hadoop-dist/target/hadoop-${version}
${rc} ${output} = Run And Return Rc And Output docker-compose -f ${basedir}/target/compose/docker-compose.yaml ${command}
Set Environment Variable OZONEDIR ${basedir}/hadoop-dist/target/ozone
${rc} ${output} = Run And Return Rc And Output docker-compose -f ${basedir}/hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml ${command}
Log ${output}
Should Be Equal As Integers ${rc} 0
[return] ${rc} ${output}
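For reference, the keyword above is roughly equivalent to the following shell commands run from the project root (same paths as in the keyword; only `up -d` is shown, other compose subcommands work the same way):
```
export OZONEDIR="$PWD/hadoop-dist/target/ozone"
docker-compose -f hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml up -d
```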