# Dockerfile: single image packing Hadoop, Spark, Kafka, and HBase
# (forked from alaeddinehamroun/hadoop-cluster-docker)
# Pin the base image: openjdk-8-jdk is not shipped on every Ubuntu release,
# so a floating ubuntu:latest can break this build; 22.04 still carries it
FROM ubuntu:22.04
WORKDIR /root
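# From here on, ~ and relative paths resolve to /root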
# Install prerequisites (DEBIAN_FRONTEND keeps tzdata/JDK prompts from hanging the build)
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y openssh-server openjdk-8-jdk ssh wget curl vim python3 && \
    rm -rf /var/lib/apt/lists/*
RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
python3 get-pip.py && \
rm get-pip.py && \
python3 -m pip install --upgrade pip setuptools
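# Optional sanity check (an addition, not in the original build): fail fast
# if the JDK or the pip bootstrap above did not take
RUN java -version && python3 -m pip --version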
# Install Hadoop
RUN wget https://archive.apache.org/dist/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz && \
tar -xzf hadoop-3.3.6.tar.gz && \
mv hadoop-3.3.6 /usr/local/hadoop && \
rm hadoop-3.3.6.tar.gz
# Install Spark
RUN wget https://archive.apache.org/dist/spark/spark-3.5.0/spark-3.5.0-bin-hadoop3.tgz && \
tar -xzf spark-3.5.0-bin-hadoop3.tgz && \
mv spark-3.5.0-bin-hadoop3 /usr/local/spark && \
rm spark-3.5.0-bin-hadoop3.tgz
# Pin PySpark to the installed Spark version so driver and cluster stay in step
RUN pip install pyspark==3.5.0
# Install Kafka
RUN wget https://archive.apache.org/dist/kafka/3.6.1/kafka_2.13-3.6.1.tgz && \
tar -xzf kafka_2.13-3.6.1.tgz && \
mv kafka_2.13-3.6.1 /usr/local/kafka && \
rm kafka_2.13-3.6.1.tgz
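# The Kafka tarball bundles ZooKeeper; start-kafka-zookeeper.sh (copied below)
# is expected to launch it alongside the broker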
# Install HBase (from archive.apache.org: downloads.apache.org hosts only
# current releases, so a pinned 2.5.8 link there eventually goes dead)
RUN wget https://archive.apache.org/dist/hbase/2.5.8/hbase-2.5.8-hadoop3-bin.tar.gz && \
    tar -xzf hbase-2.5.8-hadoop3-bin.tar.gz && \
    mv hbase-2.5.8-hadoop3 /usr/local/hbase && \
    rm hbase-2.5.8-hadoop3-bin.tar.gz
# Set environment variables
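# NOTE: the JDK path below assumes an amd64 base; on arm64 the package
# installs to /usr/lib/jvm/java-8-openjdk-arm64 instead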
ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
ENV HADOOP_HOME=/usr/local/hadoop
ENV YARN_HOME=/usr/local/hadoop
ENV SPARK_HOME=/usr/local/spark
ENV KAFKA_HOME=/usr/local/kafka
ENV HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
ENV YARN_CONF_DIR=/usr/local/hadoop/etc/hadoop
ENV LD_LIBRARY_PATH=/usr/local/hadoop/lib/native:$LD_LIBRARY_PATH
ENV HBASE_HOME=/usr/local/hbase
ENV CLASSPATH=$CLASSPATH:/usr/local/hbase/lib/*
ENV PATH=$PATH:/usr/local/hadoop/bin:/usr/local/hadoop/sbin:/usr/local/spark/bin:/usr/local/kafka/bin:/usr/local/hbase/bin
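# PATH now exposes the hadoop, spark, kafka, and hbase CLIs in every shell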
# SSH to self without a password (key-based login)
RUN ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa && \
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys && \
chmod 0600 ~/.ssh/authorized_keys
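# Because every container shares this image (and so this keypair), Hadoop's
# start-dfs.sh/start-yarn.sh can ssh from the master into each worker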
RUN mkdir -p ~/hdfs/namenode && \
    mkdir -p ~/hdfs/datanode && \
    mkdir -p $HADOOP_HOME/logs
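# NOTE: these local paths are expected to match dfs.namenode.name.dir and
# dfs.datanode.data.dir in the hdfs-site.xml copied below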
COPY config/* /tmp/
RUN mv /tmp/ssh_config ~/.ssh/config && \
    mv /tmp/hadoop-env.sh $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
mv /tmp/hdfs-site.xml $HADOOP_HOME/etc/hadoop/hdfs-site.xml && \
mv /tmp/core-site.xml $HADOOP_HOME/etc/hadoop/core-site.xml && \
mv /tmp/mapred-site.xml $HADOOP_HOME/etc/hadoop/mapred-site.xml && \
mv /tmp/yarn-site.xml $HADOOP_HOME/etc/hadoop/yarn-site.xml && \
mv /tmp/workers $HADOOP_HOME/etc/hadoop/workers && \
mv /tmp/start-kafka-zookeeper.sh ~/start-kafka-zookeeper.sh && \
mv /tmp/start-hadoop.sh ~/start-hadoop.sh && \
mv /tmp/run-wordcount.sh ~/run-wordcount.sh && \
mv /tmp/spark-defaults.conf $SPARK_HOME/conf/spark-defaults.conf && \
mv /tmp/hbase-env.sh $HBASE_HOME/conf/hbase-env.sh && \
mv /tmp/hbase-site.xml $HBASE_HOME/conf/hbase-site.xml
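# NOTE: every file moved above must be present in the build context's
# config/ directory, or this layer fails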
RUN chmod +x ~/start-hadoop.sh && \
chmod +x ~/start-kafka-zookeeper.sh && \
chmod +x ~/run-wordcount.sh && \
chmod +x $HADOOP_HOME/sbin/start-dfs.sh && \
chmod +x $HADOOP_HOME/sbin/start-yarn.sh
# Format the HDFS namenode at build time
RUN $HADOOP_HOME/bin/hdfs namenode -format
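# NOTE: formatting here bakes the HDFS cluster ID into the image's
# ~/hdfs/namenode directory, so the master can start without a runtime format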
CMD [ "sh", "-c", "service ssh start; bash"]