Mirror of https://github.com/apache/druid.git

Add the option to start Hadoop docker container when running integration tests (#9513)

* hadoop docker it
* hadoop docker container it
* fix hadoop container

This commit is contained in:
parent e7b3dd9cd1
commit 09600db8f2
@@ -23,7 +23,7 @@ LC_ALL=C.UTF-8

 # JAVA OPTS
 COMMON_DRUID_JAVA_OPTS=-Duser.timezone=UTC -Dfile.encoding=UTF-8 -Dlog4j.configurationFile=/shared/docker/lib/log4j2.xml
-DRUID_DEP_LIB_DIR=/shared/docker/lib/*:/usr/local/druid/lib/mysql-connector-java.jar
+DRUID_DEP_LIB_DIR=/shared/hadoop_xml/*:/shared/docker/lib/*:/usr/local/druid/lib/mysql-connector-java.jar

 # Druid configs
 druid_auth_authenticator_basic_authorizerName=basic

@@ -62,3 +62,4 @@ druid_zk_service_host=druid-zookeeper-kafka
 druid_auth_basic_common_maxSyncRetries=20
 druid_indexer_logs_directory=/shared/tasklogs
 druid_sql_enable=true
+druid_extensions_hadoopDependenciesDir=/shared/hadoop-dependencies
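Prepending /shared/hadoop_xml/* to DRUID_DEP_LIB_DIR puts the Hadoop site XML (copied out of the Hadoop container later by the cluster startup script) on the Druid services' classpath, and druid_extensions_hadoopDependenciesDir points the services at the pulled Hadoop client jars. A quick sanity check of the shared volume, assuming the layout these scripts create:

    # The *.xml files are copied in by the startup script via:
    #   cp /usr/local/hadoop/etc/hadoop/*.xml /shared/hadoop_xml
    ls /shared/hadoop_xml
    ls /shared/hadoop-dependencies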
@@ -266,6 +266,7 @@
     <profile>
       <id>integration-tests</id>
       <properties>
+        <start.hadoop.docker>false</start.hadoop.docker>
         <override.config.path></override.config.path>
       </properties>
       <build>

@@ -282,6 +283,7 @@
             <phase>pre-integration-test</phase>
             <configuration>
               <environmentVariables>
+                <DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER>${start.hadoop.docker}</DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER>
                 <DRUID_INTEGRATION_TEST_JVM_RUNTIME>${jvm.runtime}</DRUID_INTEGRATION_TEST_JVM_RUNTIME>
                 <DRUID_INTEGRATION_TEST_GROUP>${groups}</DRUID_INTEGRATION_TEST_GROUP>
                 <DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH>${override.config.path}</DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH>
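Because start.hadoop.docker defaults to false, existing integration-test runs are unchanged and the Hadoop container is strictly opt-in. A minimal sketch of an opt-in invocation, assuming Maven's standard -D property override (any extra flags a given suite needs may differ):

    # Overrides the profile default; the exec plugin above exports it to the
    # scripts as DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER.
    mvn verify -P integration-tests -Dstart.hadoop.docker=true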
@@ -16,7 +16,7 @@

 # Cleanup old images/containers
 {
-for node in druid-historical druid-coordinator druid-overlord druid-router druid-router-permissive-tls druid-router-no-client-auth-tls druid-router-custom-check-tls druid-broker druid-middlemanager druid-zookeeper-kafka druid-metadata-storage;
+for node in druid-historical druid-coordinator druid-overlord druid-router druid-router-permissive-tls druid-router-no-client-auth-tls druid-router-custom-check-tls druid-broker druid-middlemanager druid-zookeeper-kafka druid-metadata-storage druid-it-hadoop;
 do
 docker stop $node
 docker rm $node
@@ -29,6 +29,7 @@
 {
 # environment variables
 DIR=$(cd $(dirname $0) && pwd)
+HADOOP_DOCKER_DIR=$DIR/../examples/quickstart/tutorial/hadoop/docker
 DOCKERDIR=$DIR/docker
 SERVICE_SUPERVISORDS_DIR=$DOCKERDIR/service-supervisords
 ENVIRONMENT_CONFIGS_DIR=$DOCKERDIR/environment-configs
@@ -45,6 +46,8 @@
 cp -r client_tls docker/client_tls

 # Make directories if they dont exist
+mkdir -p $SHARED_DIR/hadoop_xml
+mkdir -p $SHARED_DIR/hadoop-dependencies
 mkdir -p $SHARED_DIR/logs
 mkdir -p $SHARED_DIR/tasklogs
@@ -53,6 +56,12 @@
 cp -R docker $SHARED_DIR/docker
 mvn -B dependency:copy-dependencies -DoutputDirectory=$SHARED_DIR/docker/lib

+# Pull Hadoop dependency if needed
+if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [ "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" == true ]
+then
+java -cp "$SHARED_DIR/docker/lib/*" -Ddruid.extensions.hadoopDependenciesDir="$SHARED_DIR/hadoop-dependencies" org.apache.druid.cli.Main tools pull-deps -h org.apache.hadoop:hadoop-client:2.8.5 -h org.apache.hadoop:hadoop-aws:2.8.5
+fi
+
 # install logging config
 cp src/main/resources/log4j2.xml $SHARED_DIR/docker/lib/log4j2.xml
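pull-deps materializes each -h coordinate under <artifact-id>/<version>/ inside the hadoop-dependencies directory, which is the layout druid_extensions_hadoopDependenciesDir expects. A post-run sanity check, assuming that convention:

    # Both coordinates requested above should now exist as jar trees.
    ls "$SHARED_DIR/hadoop-dependencies/hadoop-client/2.8.5"
    ls "$SHARED_DIR/hadoop-dependencies/hadoop-aws/2.8.5"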
@@ -114,6 +123,12 @@ else
 esac
 fi

+# Build Hadoop docker if needed
+if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [ "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" == true ]
+then
+docker build -t druid-it/hadoop:2.8.5 $HADOOP_DOCKER_DIR
+fi
+
 # Start docker containers for all Druid processes and dependencies
 {
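The tag given to docker build must match the image name the docker run in the next hunk uses; if the build fails, the later run aborts with an unknown-image error. A quick check with the standard Docker CLI (illustrative, not part of the commit):

    # Should list druid-it/hadoop with tag 2.8.5 once the build succeeds.
    docker images druid-it/hadoop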
@@ -149,5 +164,39 @@ fi

 # Start Router with custom TLS cert checkers
 docker run -d --privileged --net druid-it-net --ip 172.172.172.12 ${COMMON_ENV} ${ROUTER_CUSTOM_CHECK_TLS_ENV} ${OVERRIDE_ENV} --hostname druid-router-custom-check-tls --name druid-router-custom-check-tls -p 8891:8891 -p 9091:9091 -v $SHARED_DIR:/shared -v $SERVICE_SUPERVISORDS_DIR/druid.conf:$SUPERVISORDIR/druid.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
-}
+
+# Start Hadoop docker if needed
+if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [ "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" == true ]
+then
+# Start Hadoop docker container
+docker run -d --privileged --net druid-it-net --ip 172.172.172.13 -h druid-it-hadoop --name druid-it-hadoop -p 2049:2049 -p 2122:2122 -p 8020:8020 -p 8021:8021 -p 8030:8030 -p 8031:8031 -p 8032:8032 -p 8033:8033 -p 8040:8040 -p 8042:8042 -p 8088:8088 -p 8443:8443 -p 9000:9000 -p 10020:10020 -p 19888:19888 -p 34455:34455 -p 49707:49707 -p 50010:50010 -p 50020:50020 -p 50030:50030 -p 50060:50060 -p 50070:50070 -p 50075:50075 -p 50090:50090 -p 51111:51111 -v $SHARED_DIR:/shared druid-it/hadoop:2.8.5 sh -c "/etc/bootstrap.sh && tail -f /dev/null"
+
+# wait for hadoop namenode to be up
+echo "Waiting for hadoop namenode to be up"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -mkdir -p /druid"
+while [ $? -ne 0 ]
+do
+sleep 2
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -mkdir -p /druid"
+done
+echo "Finished waiting for Hadoop namenode"
+
+# Setup hadoop druid dirs
+echo "Setting up druid hadoop dirs"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -mkdir -p /druid"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -mkdir -p /druid/segments"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -mkdir -p /quickstart"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -chmod 777 /druid"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -chmod 777 /druid/segments"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -chmod 777 /quickstart"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -chmod -R 777 /tmp"
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -chmod -R 777 /user"
+# Copy data files to Hadoop container
+docker exec -t druid-it-hadoop sh -c "./usr/local/hadoop/bin/hdfs dfs -put /shared/wikiticker-it/wikiticker-2015-09-12-sampled.json.gz /quickstart/wikiticker-2015-09-12-sampled.json.gz"
+echo "Finished setting up druid hadoop dirs"
+
+echo "Copying Hadoop XML files to shared"
+docker exec -t druid-it-hadoop sh -c "cp /usr/local/hadoop/etc/hadoop/*.xml /shared/hadoop_xml"
+echo "Copied Hadoop XML files to shared"
+fi
+}
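Note the ./usr/local/... paths in the docker exec commands are relative; they presumably resolve to /usr/local/... because the container's working directory is /. Once bootstrap completes, the setup can be verified with the same pattern (an illustrative check, not part of the commit):

    docker exec -t druid-it-hadoop sh -c "/usr/local/hadoop/bin/hdfs dfs -ls /druid /quickstart"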
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-for node in druid-historical druid-coordinator druid-overlord druid-router druid-router-permissive-tls druid-router-no-client-auth-tls druid-router-custom-check-tls druid-broker druid-middlemanager druid-zookeeper-kafka druid-metadata-storage;
+for node in druid-historical druid-coordinator druid-overlord druid-router druid-router-permissive-tls druid-router-no-client-auth-tls druid-router-custom-check-tls druid-broker druid-middlemanager druid-zookeeper-kafka druid-metadata-storage druid-it-hadoop;
 do
 docker stop $node
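If only the Hadoop container needs recycling between runs, the same stop/remove pair from the loop above can be applied to it directly:

    docker stop druid-it-hadoop
    docker rm druid-it-hadoop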