Compare commits
3 Commits
Author | SHA1 | Date
---|---|---
Márton Elek | 06d125c9ab |
Márton Elek | 215cbbf169 |
Márton Elek | def2e43812 |
@@ -14,6 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1

 export HADOOP_VERSION=3
-"$DIR/../../../hadoop-ozone/dist/target/ozone-*-SNAPSHOT/compose/test-all.sh"
+OZONE_VERSION=$(grep "<ozone.version>" "$DIR/../../pom.xml" | sed 's/<[^>]*>//g'| sed 's/^[ \t]*//')
+cd "$DIR/../../dist/target/ozone-$OZONE_VERSION/compose" || exit 1
+./test-all.sh
 exit $?
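The new `OZONE_VERSION` line reads the `<ozone.version>` property and strips the XML tags and leading whitespace with two `sed` passes. A minimal sketch of that pipeline against a made-up sample line (the version value is illustrative only):

```bash
# Feed a sample <ozone.version> line through the same two sed stages used above.
echo '    <ozone.version>0.5.0-SNAPSHOT</ozone.version>' \
  | sed 's/<[^>]*>//g' \
  | sed 's/^[ \t]*//'
# prints: 0.5.0-SNAPSHOT
```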
@@ -13,10 +13,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-mkdir -p ./target
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
-grep -r --include="*.java" "@author" .
-if [ $? -gt 0 ]; then
+if grep -r --include="*.java" "@author" .; then
   exit 0
 else
-  exit -1
+  exit 1
 fi
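The rewritten check branches on `grep`'s exit status directly instead of inspecting `$?` afterwards (the pattern ShellCheck flags as SC2181). A small self-contained sketch of that idiom, independent of the Ozone tree:

```bash
# grep exits 0 when it finds a match and 1 when it does not; `if` branches on
# that exit status directly, with no intermediate $? test.
tmpdir=$(mktemp -d)
echo '// @author somebody' > "$tmpdir/Sample.java"
if grep -r --include="*.java" "@author" "$tmpdir" > /dev/null; then
  echo "found @author tags"
else
  echo "no @author tags"
fi
rm -r "$tmpdir"
```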
@@ -13,6 +13,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
 export MAVEN_OPTS="-Xmx4096m"
-mvn -am -pl :hadoop-ozone-dist -P hdds -Dmaven.javadoc.skip=true -DskipTests clean install
+mvn -B -f pom.ozone.xml -Dmaven.javadoc.skip=true -DskipTests clean install
 exit $?
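`exit $?` right after the Maven call propagates Maven's exit code as the script's result, and `-B` keeps the output in non-interactive batch mode. A tiny illustration of the propagation, with `false` standing in for the Maven invocation:

```bash
# The inner script mirrors the exit code of its last command via `exit $?`.
bash -c 'false; exit $?'
echo "wrapper exited with $?"   # prints: wrapper exited with 1
```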
@@ -13,11 +13,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-mvn -fn checkstyle:check -am -pl :hadoop-ozone-dist -Phdds
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
+mvn -B -fn checkstyle:check -f pom.ozone.xml
+
+#Print out the exact violations with parsing XML results with sed
+find "." -name checkstyle-errors.xml -print0 | xargs -0 sed '$!N; /<file.*\n<\/file/d;P;D' | sed '/<\/.*/d;/<checkstyle.*/d;s/<error.*line="\([[:digit:]]*\)".*message="\([^"]\+\).*/ \1: \2/;s/<file name="\([^"]*\)".*/\1/;/<\?xml.*>/d'
+
 violations=$(grep -r error --include checkstyle-errors.xml .| wc -l)
 if [[ $violations -gt 0 ]]; then
   echo "There are $violations checkstyle violations"
-  exit -1
+  exit 1
 fi
 exit 0
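The added `find | sed | sed` pipeline condenses the checkstyle XML reports into a file name followed by `line: message` pairs. A self-contained sketch of its effect on a made-up report (file name and message are invented for illustration):

```bash
# Build a tiny fake checkstyle-errors.xml and run the same two sed programs on it.
cat > /tmp/checkstyle-errors.xml <<'EOF'
<?xml version="1.0" encoding="UTF-8"?>
<checkstyle version="8.8">
<file name="/src/Foo.java">
<error line="10" severity="error" message="Unused import." source="x"/>
</file>
</checkstyle>
EOF
sed '$!N; /<file.*\n<\/file/d;P;D' /tmp/checkstyle-errors.xml \
  | sed '/<\/.*/d;/<checkstyle.*/d;s/<error.*line="\([[:digit:]]*\)".*message="\([^"]\+\).*/ \1: \2/;s/<file name="\([^"]*\)".*/\1/;/<\?xml.*>/d'
# prints:
#   /src/Foo.java
#    10: Unused import.
```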
@ -13,6 +13,8 @@
|
|||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
cd "$DIR/../../.." || exit 1
|
||||
|
||||
FINDBUGS_ALL_FILE=./target/findbugs-all.txt
|
||||
|
||||
|
@ -20,15 +22,15 @@ mkdir -p ./target
|
|||
rm "$FINDBUGS_ALL_FILE" || true
|
||||
touch "$FINDBUGS_ALL_FILE"
|
||||
|
||||
mvn -fn findbugs:check -Dfindbugs.failOnError=false -am -pl :hadoop-ozone-dist -Phdds
|
||||
mvn -B compile -fn findbugs:check -Dfindbugs.failOnError=false -f pom.ozone.xml
|
||||
|
||||
find hadoop-ozone -name findbugsXml.xml | xargs -n1 convertXmlToText | tee -a "${FINDBUGS_ALL_FILE}"
|
||||
find hadoop-hdds -name findbugsXml.xml | xargs -n1 convertXmlToText | tee -a "${FINDBUGS_ALL_FILE}"
|
||||
find hadoop-ozone -name findbugsXml.xml -print0 | xargs -0 -n1 convertXmlToText | tee -a "${FINDBUGS_ALL_FILE}"
|
||||
find hadoop-hdds -name findbugsXml.xml -print0 | xargs -0 -n1 convertXmlToText | tee -a "${FINDBUGS_ALL_FILE}"
|
||||
|
||||
bugs=$(cat "$FINDBUGS_ALL_FILE" | wc -l)
|
||||
bugs=$(wc -l < "$FINDBUGS_ALL_FILE")
|
||||
|
||||
if [[ ${bugs} -gt 0 ]]; then
|
||||
exit -1
|
||||
exit 1
|
||||
else
|
||||
exit 0
|
||||
fi
|
||||
|
|
|
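Two of the rewrites above are generic shell hygiene: `wc -l < file` yields just the count (no `cat`, no file name in the output), and `-print0` with `xargs -0` keeps file names containing spaces intact. A quick standalone illustration:

```bash
# Count lines without the extra cat and without a file name in the output.
printf 'bug one\nbug two\n' > /tmp/findbugs-all.txt
bugs=$(wc -l < /tmp/findbugs-all.txt)
echo "found $bugs warning lines"

# NUL-separated find/xargs survives spaces in paths.
mkdir -p "/tmp/dir with space"
touch "/tmp/dir with space/findbugsXml.xml"
find "/tmp/dir with space" -name findbugsXml.xml -print0 | xargs -0 -n1 echo
```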
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
+export MAVEN_OPTS="-Xmx4096m"
+mvn -B install -f pom.ozone.xml -DskipTests
+mvn -B -fn test -f pom.ozone.xml -pl :hadoop-ozone-integration-test,:hadoop-ozone-filesystem
+module_failed_tests=$(find "." -name 'TEST*.xml' -print0 \
+    | xargs -0 -n1 "grep" -l -E "<failure|<error"\
+    | awk -F/ '{sub("'"TEST-JUNIT_TEST_OUTPUT_DIR"'",""); sub(".xml",""); print $NF}')
+if [[ -n "${module_failed_tests}" ]] ; then
+   exit 1
+fi
+exit 0
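The failure detection at the end of the new script lists every surefire report containing a `<failure` or `<error` element and reduces each path to a test report name. A sketch on a made-up report (directory and class name are invented):

```bash
# One fake failing surefire report is enough to make the pipeline print a name.
mkdir -p /tmp/surefire-reports
cat > /tmp/surefire-reports/TEST-org.apache.hadoop.ozone.FooTest.xml <<'EOF'
<testsuite><testcase name="t"><failure message="boom"/></testcase></testsuite>
EOF
find /tmp/surefire-reports -name 'TEST*.xml' -print0 \
    | xargs -0 -n1 grep -l -E "<failure|<error" \
    | awk -F/ '{sub(".xml",""); print $NF}'
# prints: TEST-org.apache.hadoop.ozone.FooTest
```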
@@ -13,12 +13,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-hadooplines=$(git diff --name-only HEAD~1..HEAD | grep -v hadoop-ozone | grep -v hadoop-hdds | wc -l )
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
+hadooplines=$(git diff --name-only HEAD~1..HEAD | grep -v hadoop-ozone | grep -c -v hadoop-hdds )
 if [ "$hadooplines" == "0" ]; then
   echo "Only ozone/hdds subprojects are changed"
   exit 0
 else
   echo "Main hadoop projects are changed in an ozone patch."
   echo "Please do it in a HADOOP/HDFS patch and test it with hadoop precommit tests"
-  exit -1
+  exit 1
 fi
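`grep -c` counts matching lines by itself, so the trailing `wc -l` is no longer needed; combined with `-v` it counts the lines that do not match. A standalone illustration with a fabricated change list:

```bash
# Only paths outside hadoop-ozone and hadoop-hdds should be counted.
printf 'hadoop-hdds/a.java\nhadoop-ozone/b.java\nhadoop-common/c.java\n' \
  | grep -v hadoop-ozone \
  | grep -c -v hadoop-hdds
# prints: 1   (only hadoop-common/c.java survives both filters)
```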
@@ -13,12 +13,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
 mkdir -p target
 rm target/rat-aggregated.txt
-mvn -fn org.apache.rat:apache-rat-plugin:0.13:check -am -pl :hadoop-ozone-dist -Phdds
+cd hadoop-hdds || exit 1
+mvn -B -fn org.apache.rat:apache-rat-plugin:0.13:check
+cd ../hadoop-ozone || exit 1
+mvn -B -fn org.apache.rat:apache-rat-plugin:0.13:check
 grep -r --include=rat.txt "!????" | tee ./target/rat-aggregated.txt
 if [ "$(cat target/rat-aggregated.txt)" ]; then
-  exit -1
+  exit 1
 fi
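The final `[ "$(cat target/rat-aggregated.txt)" ]` test simply asks whether the aggregated report is non-empty: any line collected by the RAT grep makes the script fail. A minimal standalone sketch of that test (the file content is invented):

```bash
# `[ "$(cat FILE)" ]` is true when FILE contains anything beyond trailing newlines.
echo "some-file-without-an-approved-license" > /tmp/rat-aggregated.txt
if [ "$(cat /tmp/rat-aggregated.txt)" ]; then
  echo "unapproved licenses found"
fi
```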
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
+OUTPUT_FILE="$DIR/../../../target/shell-problems.txt"
+mkdir -p "$(dirname "$OUTPUT_FILE")"
+echo "" > "$OUTPUT_FILE"
+find "./hadoop-hdds" -type f -executable | grep -v target | grep -v node_modules | grep -v py | xargs -n1 shellcheck | tee "$OUTPUT_FILE"
+find "./hadoop-ozone" -type f -executable | grep -v target | grep -v node_modules | grep -v py | xargs -n1 shellcheck | tee "$OUTPUT_FILE"
+
+
+if [ "$(cat "$OUTPUT_FILE")" ]; then
+   exit 1
+fi
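To try the same kind of check by hand, shellcheck can be pointed at a single file; the throwaway script below deliberately contains an unquoted variable so shellcheck exits non-zero (shellcheck must be installed):

```bash
# SC2086 (unquoted variable) is the expected finding here.
cat > /tmp/example.sh <<'EOF'
#!/usr/bin/env bash
files=$1
ls $files
EOF
shellcheck /tmp/example.sh || echo "shellcheck reported problems"
```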
@@ -14,11 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 export MAVEN_OPTS="-Xmx4096m"
-mvn -fn test -am -pl :hadoop-ozone-dist -P hdds
-module_failed_tests=$(find "." -name 'TEST*.xml'\
-    | xargs "grep" -l -E "<failure|<error"\
+#mvn -fn test -f pom.ozone.xml -pl \!:hadoop-ozone-integration-test,\!:hadoop-ozone-filesystem
+module_failed_tests=$(find "." -name 'TEST*.xml' -print0 \
+    | xargs -n1 -0 "grep" -l -E "<failure|<error"\
     | awk -F/ '{sub("'"TEST-JUNIT_TEST_OUTPUT_DIR"'",""); sub(".xml",""); print $NF}')
 if [[ -n "${module_failed_tests}" ]] ; then
-  exit -1
+  exit 1
 fi
 exit 0
@@ -75,7 +75,7 @@ class ClusterUtils(object):
                    freon_client='om'):
         # run freon
         cmd = "docker-compose -f %s " \
-              "exec %s /opt/hadoop/bin/ozone " \
+              "exec -T %s /opt/hadoop/bin/ozone " \
               "freon rk " \
               "--numOfVolumes %s " \
              "--numOfBuckets %s " \

@@ -116,7 +116,7 @@ class ClusterUtils(object):
     @classmethod
     def get_ozone_confkey_value(cls, docker_compose_file, key_name):
         cmd = "docker-compose -f %s " \
-              "exec om /opt/hadoop/bin/ozone " \
+              "exec -T om /opt/hadoop/bin/ozone " \
               "getconf -confKey %s" \
               % (docker_compose_file, key_name)
         exit_code, output = cls.run_cmd(cmd)

@@ -131,7 +131,7 @@ class ClusterUtils(object):
         """
         ozone_metadata_dir = cls.get_ozone_confkey_value(docker_compose_file,
                                                          "ozone.metadata.dirs")
-        cmd = "docker-compose -f %s exec scm cat %s/scm/current/VERSION" % \
+        cmd = "docker-compose -f %s exec -T scm cat %s/scm/current/VERSION" % \
               (docker_compose_file, ozone_metadata_dir)
         exit_code, output = cls.run_cmd(cmd)
         assert exit_code == 0, "get scm UUID failed with output=[%s]" % output

@@ -158,7 +158,7 @@ class ClusterUtils(object):
         scm_uuid = cls.find_scm_uuid(docker_compose_file)
         container_parent_path = "%s/hdds/%s/current/containerDir0" % \
             (datanode_dir, scm_uuid)
-        cmd = "docker-compose -f %s exec --index=%s datanode find %s -type f " \
+        cmd = "docker-compose -T -f %s exec --index=%s datanode find %s -type f " \
              "-name '*.container'" \
              % (docker_compose_file, datanode_index, container_parent_path)
         exit_code, output = cls.run_cmd(cmd)

@@ -166,7 +166,7 @@ class ClusterUtils(object):
         if exit_code == 0 and output:
             container_list = map(str.strip, output.split("\n"))
             for container_path in container_list:
-                cmd = "docker-compose -f %s exec --index=%s datanode cat %s" \
+                cmd = "docker-compose -f %s exec -T --index=%s datanode cat %s" \
                       % (docker_compose_file, datanode_index, container_path)
                 exit_code, output = cls.run_cmd(cmd)
                 assert exit_code == 0, \

@@ -205,7 +205,7 @@ class ClusterUtils(object):
     @classmethod
     def create_volume(cls, docker_compose_file, volume_name):
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec -T ozone_client /opt/hadoop/bin/ozone " \
                   "sh volume create /%s --user root" % \
                   (docker_compose_file, volume_name)
         logger.info("Creating Volume %s", volume_name)

@@ -216,7 +216,7 @@ class ClusterUtils(object):
     @classmethod
     def delete_volume(cls, docker_compose_file, volume_name):
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec -T ozone_client /opt/hadoop/bin/ozone " \
                   "sh volume delete /%s" % (docker_compose_file, volume_name)
         logger.info("Deleting Volume %s", volume_name)
         exit_code, output = cls.run_cmd(command)

@@ -225,7 +225,7 @@ class ClusterUtils(object):
     @classmethod
     def create_bucket(cls, docker_compose_file, bucket_name, volume_name):
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec -T ozone_client /opt/hadoop/bin/ozone " \
                   "sh bucket create /%s/%s" % (docker_compose_file,
                                                volume_name, bucket_name)
         logger.info("Creating Bucket %s in volume %s",

@@ -237,7 +237,7 @@ class ClusterUtils(object):
     @classmethod
     def delete_bucket(cls, docker_compose_file, bucket_name, volume_name):
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec -T ozone_client /opt/hadoop/bin/ozone " \
                   "sh bucket delete /%s/%s" % (docker_compose_file,
                                                volume_name, bucket_name)
         logger.info("Running delete bucket of %s/%s", volume_name, bucket_name)

@@ -248,13 +248,13 @@ class ClusterUtils(object):
     def put_key(cls, docker_compose_file, bucket_name, volume_name,
                 filepath, key_name=None, replication_factor=None):
         command = "docker-compose -f %s " \
-                  "exec ozone_client ls %s" % (docker_compose_file, filepath)
+                  "exec -T ozone_client ls %s" % (docker_compose_file, filepath)
         exit_code, output = cls.run_cmd(command)
         assert exit_code == 0, "%s does not exist" % filepath
         if key_name is None:
             key_name = os.path.basename(filepath)
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec ozone_client -T /opt/hadoop/bin/ozone " \
                   "sh key put /%s/%s/%s %s" % (docker_compose_file,
                                                volume_name, bucket_name,
                                                key_name, filepath)

@@ -269,7 +269,7 @@ class ClusterUtils(object):
     def delete_key(cls, docker_compose_file, bucket_name, volume_name,
                    key_name):
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec -T ozone_client /opt/hadoop/bin/ozone " \
                   "sh key delete /%s/%s/%s" \
                   % (docker_compose_file, volume_name, bucket_name, key_name)
         logger.info("Running delete key %s in %s/%s",

@@ -283,7 +283,7 @@ class ClusterUtils(object):
         if filepath is None:
             filepath = '.'
         command = "docker-compose -f %s " \
-                  "exec ozone_client /opt/hadoop/bin/ozone " \
+                  "exec -T ozone_client /opt/hadoop/bin/ozone " \
                   "sh key get /%s/%s/%s %s" % (docker_compose_file,
                                                volume_name, bucket_name,
                                                key_name, filepath)

@@ -299,7 +299,7 @@ class ClusterUtils(object):
         Before running any 'putKey' operation, this function is called to store
         the original checksum of the file. The file is then uploaded as a key.
         """
-        command = "docker-compose -f %s " \
+        command = "docker-compose -T -f %s " \
                   "exec %s md5sum %s" % \
                   (docker_compose_file, client, filepath)
         exit_code, output = cls.run_cmd(command)

@@ -318,7 +318,7 @@ class ClusterUtils(object):
     @classmethod
     def get_pipelines(cls, docker_compose_file):
         command = "docker-compose -f %s " \
-                  + "exec ozone_client /opt/hadoop/bin/ozone scmcli " \
+                  + "exec -T ozone_client /opt/hadoop/bin/ozone scmcli " \
                   + "listPipelines" % (docker_compose_file)
         exit_code, output = cls.run_cmd(command)
         assert exit_code == 0, "list pipeline command failed"
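All of the Python changes add the same flag: `-T` tells `docker-compose exec` not to allocate a pseudo-TTY, which is typically required when the command is driven from a script or CI job rather than an interactive terminal. A hand-run equivalent of one of the rewritten commands, with the compose file name and the `om` service taken from the snippets above as examples:

```bash
# -T: disable pseudo-TTY allocation for the exec'd command.
docker-compose -f docker-compose.yaml exec -T om /opt/hadoop/bin/ozone version
```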