diff --git a/integration-tests/build_run_cluster.sh b/integration-tests/build_run_cluster.sh
index 928fdfc0b78..7da37342ab2 100755
--- a/integration-tests/build_run_cluster.sh
+++ b/integration-tests/build_run_cluster.sh
@@ -21,7 +21,7 @@ echo $DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH
 
 export DIR=$(cd $(dirname $0) && pwd)
 export HADOOP_DOCKER_DIR=$DIR/../examples/quickstart/tutorial/hadoop/docker
 
-if [ -n "${HADOOP_VERSION}" ] && [ ${HADOOP_VERSION:0:1)} == "3" ]; then
+if [ -n "${HADOOP_VERSION}" ] && [ "${HADOOP_VERSION:0:1}" == "3" ]; then
 export HADOOP_DOCKER_DIR=$DIR/../examples/quickstart/tutorial/hadoop3/docker
 fi
diff --git a/integration-tests/script/copy_hadoop_resources.sh b/integration-tests/script/copy_hadoop_resources.sh
index 8a442c7c466..fcd97b1d9bb 100755
--- a/integration-tests/script/copy_hadoop_resources.sh
+++ b/integration-tests/script/copy_hadoop_resources.sh
@@ -35,7 +35,7 @@ else
 fi
 
 set -e
-if [ -n "${HADOOP_VERSION}" ] && [ ${HADOOP_VERSION:0:1)} == "3" ]; then
+if [ -n "${HADOOP_VERSION}" ] && [ "${HADOOP_VERSION:0:1}" == "3" ]; then
 docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -mkdir -p /user/root"
 docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -put /usr/local/hadoop/etc/hadoop/ input"
 fi
diff --git a/integration-tests/script/copy_resources_template.sh b/integration-tests/script/copy_resources_template.sh
index 87e06df21ea..3f6eba9b085 100755
--- a/integration-tests/script/copy_resources_template.sh
+++ b/integration-tests/script/copy_resources_template.sh
@@ -77,7 +77,7 @@ if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [ "$DRUID_INTEGRATION
 then
 ## We put same version in both commands but as we have an if, correct code path will always be executed as this is generated script. 
 ## Remove if
- if [ -n "${HADOOP_VERSION}" ] && [ ${HADOOP_VERSION:0:1)} == "3" ]; then
+ if [ -n "${HADOOP_VERSION}" ] && [ "${HADOOP_VERSION:0:1}" == "3" ]; then
 java -cp "$SHARED_DIR/docker/lib/*" -Ddruid.extensions.hadoopDependenciesDir="$SHARED_DIR/hadoop-dependencies" org.apache.druid.cli.Main tools pull-deps -h org.apache.hadoop:hadoop-client-api:${hadoop.compile.version} -h org.apache.hadoop:hadoop-client-runtime:${hadoop.compile.version} -h org.apache.hadoop:hadoop-aws:${hadoop.compile.version} -h org.apache.hadoop:hadoop-azure:${hadoop.compile.version}
 curl https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop3-latest.jar --output $SHARED_DIR/docker/lib/gcs-connector-hadoop3-latest.jar
 else