# Creates pseudo distributed hadoop 2.8.3 with java 8
#
# Modified from the SequenceIQ Dockerfiles at https://github.com/sequenceiq/hadoop-docker
#
# docker build -t druid-hadoop-demo:2.8.3 .
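#
# and run it with, for example (the port mappings are illustrative; expose
# whichever of the ports listed at the bottom of this file you need):
#
#   docker run -it -p 50070:50070 -p 8088:8088 druid-hadoop-demo:2.8.3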

FROM sequenceiq/pam:centos-6.5
MAINTAINER SequenceIQ

USER root

# install dev tools
RUN yum clean all \
    && rpm --rebuilddb \
    && yum install -y curl which tar sudo openssh-server openssh-clients rsync yum-plugin-ovl \
    && yum clean all \
    && yum update -y libselinux \
    && yum clean all
# the libselinux update above works around https://github.com/sequenceiq/hadoop-docker/issues/14

# passwordless ssh
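# (Hadoop's sbin/start-dfs.sh and sbin/start-yarn.sh launch daemons over ssh
# even on a single node, so the image needs sshd host keys plus a root key
# pair that is authorized for the container itself)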
RUN ssh-keygen -q -N "" -t dsa -f /etc/ssh/ssh_host_dsa_key
RUN ssh-keygen -q -N "" -t rsa -f /etc/ssh/ssh_host_rsa_key
RUN ssh-keygen -q -N "" -t rsa -f /root/.ssh/id_rsa
RUN cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys

# zulu java 8
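# (Azul's certified OpenJDK build; note that `sudo` below is redundant since
# the build already runs as root, but it is harmless)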
RUN rpm --import http://repos.azulsystems.com/RPM-GPG-KEY-azulsystems
RUN rpm --rebuilddb
RUN sudo curl -o /etc/yum.repos.d/zulu.repo http://repos.azulsystems.com/rhel/zulu.repo
RUN yum install -y zulu-8

ENV JAVA_HOME /usr/lib/jvm/zulu-8
ENV PATH $PATH:$JAVA_HOME/bin

# hadoop
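# (the /usr/local/hadoop symlink created below keeps the rest of this file
# version-agnostic: everything else references the unversioned path)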
RUN curl -s https://archive.apache.org/dist/hadoop/core/hadoop-2.8.3/hadoop-2.8.3.tar.gz | tar -xz -C /usr/local/
RUN cd /usr/local && ln -s ./hadoop-2.8.3 hadoop

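# in a pseudo-distributed setup every component home points at the same tree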
ENV HADOOP_PREFIX /usr/local/hadoop
ENV HADOOP_COMMON_HOME /usr/local/hadoop
ENV HADOOP_HDFS_HOME /usr/local/hadoop
ENV HADOOP_MAPRED_HOME /usr/local/hadoop
ENV HADOOP_YARN_HOME /usr/local/hadoop
ENV HADOOP_CONF_DIR /usr/local/hadoop/etc/hadoop
ENV YARN_CONF_DIR $HADOOP_PREFIX/etc/hadoop

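# also bake JAVA_HOME and the Hadoop paths into hadoop-env.sh: daemons
# launched over ssh start from a fresh shell and do not inherit the ENV
# values declared above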
RUN sed -i '/^export JAVA_HOME/ s:.*:export JAVA_HOME=/usr/lib/jvm/zulu-8\nexport HADOOP_PREFIX=/usr/local/hadoop\nexport HADOOP_HOME=/usr/local/hadoop\n:' $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
RUN sed -i '/^export HADOOP_CONF_DIR/ s:.*:export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop/:' $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh

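# (sample input files, as in the classic Hadoop quickstart, which feeds
# etc/hadoop/*.xml to the example grep job)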
RUN mkdir $HADOOP_PREFIX/input
RUN cp $HADOOP_PREFIX/etc/hadoop/*.xml $HADOOP_PREFIX/input

# pseudo distributed
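# core-site.xml is generated from a template so that the HOSTNAME placeholder
# can be rewritten at build time (here it simply becomes localhost)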
ADD core-site.xml.template $HADOOP_PREFIX/etc/hadoop/core-site.xml.template
RUN sed s/HOSTNAME/localhost/ /usr/local/hadoop/etc/hadoop/core-site.xml.template > /usr/local/hadoop/etc/hadoop/core-site.xml
ADD hdfs-site.xml $HADOOP_PREFIX/etc/hadoop/hdfs-site.xml

ADD mapred-site.xml $HADOOP_PREFIX/etc/hadoop/mapred-site.xml
ADD yarn-site.xml $HADOOP_PREFIX/etc/hadoop/yarn-site.xml

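# format HDFS once at build time so containers start with a ready namenode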
RUN $HADOOP_PREFIX/bin/hdfs namenode -format

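# the bundled ssh_config is expected to relax host key checking so that the
# start scripts can reach localhost non-interactively; see ssh_config next to
# this Dockerfile for the exact settings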
ADD ssh_config /root/.ssh/config
RUN chmod 600 /root/.ssh/config
RUN chown root:root /root/.ssh/config

# # installing supervisord
# RUN yum install -y python-setuptools
# RUN easy_install pip
# RUN curl https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -o - | python
# RUN pip install supervisor
#
# ADD supervisord.conf /etc/supervisord.conf

ADD bootstrap.sh /etc/bootstrap.sh
RUN chown root:root /etc/bootstrap.sh
RUN chmod 700 /etc/bootstrap.sh

ENV BOOTSTRAP /etc/bootstrap.sh

# working around docker.io build error
RUN ls -la /usr/local/hadoop/etc/hadoop/*-env.sh
RUN chmod +x /usr/local/hadoop/etc/hadoop/*-env.sh
RUN ls -la /usr/local/hadoop/etc/hadoop/*-env.sh

# Copy additional .jars to classpath
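# (share/hadoop/tools/lib holds the optional connectors such as hadoop-aws;
# merging them into common/lib puts them on every daemon's and job's classpath)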
RUN cp /usr/local/hadoop/share/hadoop/tools/lib/*.jar /usr/local/hadoop/share/hadoop/common/lib/

# fix the ssh 254 error code by disabling PAM, and run sshd on port 2122
RUN sed -i "/^[^#]*UsePAM/ s/.*/#&/" /etc/ssh/sshd_config
RUN echo "UsePAM no" >> /etc/ssh/sshd_config
RUN echo "Port 2122" >> /etc/ssh/sshd_config

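# warm up HDFS at build time: start the daemons, create root's home directory,
# and upload the config files as example input; each RUN has to start sshd and
# HDFS again because processes do not survive between image layers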
RUN service sshd start && $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh && $HADOOP_PREFIX/sbin/start-dfs.sh && $HADOOP_PREFIX/bin/hdfs dfs -mkdir -p /user/root
RUN service sshd start && $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh && $HADOOP_PREFIX/sbin/start-dfs.sh && $HADOOP_PREFIX/bin/hdfs dfs -put $HADOOP_PREFIX/etc/hadoop/ input

CMD ["/etc/bootstrap.sh", "-d"]

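# (50070 = NameNode UI, 50075 = DataNode UI, 8088 = YARN ResourceManager UI,
# 19888 = MapReduce JobHistory UI; 2122 is the non-default sshd port set above)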
# HDFS ports
EXPOSE 50010 50020 50070 50075 50090 8020 9000
# Mapred ports
EXPOSE 10020 19888
# Yarn ports
EXPOSE 8030 8031 8032 8033 8040 8042 8088
# Other ports
EXPOSE 49707 2122