1) Download, install, and configure Hadoop on all nodes
[hadoop@srv1 ~]$ wget \
https://mirror.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz
[hadoop@srv2 ~]$ wget \
https://mirror.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz
[hadoop@srv3 ~]$ wget \
https://mirror.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz
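# Optionally verify the tarball against the published SHA-512 digest before extracting
# (the checksum URL below points at archive.apache.org and is an assumption; adjust to your mirror):
[hadoop@srv1 ~]$ wget \
https://archive.apache.org/dist/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz.sha512
[hadoop@srv1 ~]$ sha512sum hadoop-3.2.1.tar.gz
# compare the printed digest by eye with the contents of hadoop-3.2.1.tar.gz.sha512
[hadoop@srv1 ~]$ cat hadoop-3.2.1.tar.gz.sha512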
# Strip the top-level directory when extracting
[hadoop@srv1 ~]$ tar zxvf hadoop-3.2.1.tar.gz -C /usr/hadoop --strip-components 1
[hadoop@srv2 ~]$ tar zxvf hadoop-3.2.1.tar.gz -C /usr/hadoop --strip-components 1
[hadoop@srv3 ~]$ tar zxvf hadoop-3.2.1.tar.gz -C /usr/hadoop --strip-components 1
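# Note: tar -C /usr/hadoop assumes the target directory already exists and is writable
# by the hadoop user. If an earlier step did not create it, something like the following
# is needed on each node first (a sketch, run as root):
[root@srv1 ~]# mkdir -p /usr/hadoop && chown hadoop:hadoop /usr/hadoop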
# Configuration on srv1
[hadoop@srv1 ~]$ vim ~/.bash_profile
......
# Append the following at the end of the file
export HADOOP_HOME=/usr/hadoop
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_CLASSPATH=${HADOOP_HOME}
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
[hadoop@srv1 ~]$ source ~/.bash_profile
# Configuration on srv2
[hadoop@srv2 ~]$ vim ~/.bash_profile
......
# Append the following at the end of the file
export HADOOP_HOME=/usr/hadoop
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_CLASSPATH=${HADOOP_HOME}
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
[hadoop@srv2 ~]$ source ~/.bash_profile
# Configuration on srv3
[hadoop@srv3 ~]$ vim ~/.bash_profile
......
# Append the following at the end of the file
export HADOOP_HOME=/usr/hadoop
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_CLASSPATH=${HADOOP_HOME}
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
[hadoop@srv3 ~]$ source ~/.bash_profile
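# With the profile loaded, the hadoop command should resolve on every node;
# a quick check (repeat on srv2 and srv3):
[hadoop@srv1 ~]$ hadoop version
Hadoop 3.2.1
......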
3) Configure Hadoop
[hadoop@srv1 ~]$ mkdir ~/datanode
[hadoop@srv1 ~]$ ssh srv2.1000cc.net "mkdir ~/datanode"
[hadoop@srv1 ~]$ ssh srv3.1000cc.net "mkdir ~/datanode"
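# In this guide the hadoop user's home directory is /usr/hadoop, so these mkdir
# commands create /usr/hadoop/datanode, matching the dfs.datanode.data.dir value
# configured next. To confirm on all nodes:
[hadoop@srv1 ~]$ ls -ld ~/datanode
[hadoop@srv1 ~]$ ssh srv2.1000cc.net "ls -ld ~/datanode"
[hadoop@srv1 ~]$ ssh srv3.1000cc.net "ls -ld ~/datanode"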
(1) Configure hdfs-site
[hadoop@srv1 ~]$ vim ~/etc/hadoop/hdfs-site.xml
# Add the following inside the <configuration> section
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:///usr/hadoop/datanode</value>
    </property>
</configuration>
[hadoop@srv1 ~]$ scp ~/etc/hadoop/hdfs-site.xml srv2.1000cc.net:~/etc/hadoop/
[hadoop@srv1 ~]$ scp ~/etc/hadoop/hdfs-site.xml srv3.1000cc.net:~/etc/hadoop/
(2) Configure core-site
[hadoop@srv1 ~]$ vim ~/etc/hadoop/core-site.xml
# Add the following inside the <configuration> section
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://srv1.1000cc.net:9000/</value>
    </property>
</configuration>
[hadoop@srv1 ~]$ scp ~/etc/hadoop/core-site.xml srv2.1000cc.net:~/etc/hadoop/
[hadoop@srv1 ~]$ scp ~/etc/hadoop/core-site.xml srv3.1000cc.net:~/etc/hadoop/
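# Optional check that the new default filesystem is visible to the tools
# (hdfs getconf reads the same configuration files the daemons use):
[hadoop@srv1 ~]$ hdfs getconf -confKey fs.defaultFS
# should print hdfs://srv1.1000cc.net:9000/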
(3) Set the JDK environment
[hadoop@srv1 ~]$ sed -i -e 's/\${JAVA_HOME}/\/usr\/java\/default/' ~/etc/hadoop/hadoop-env.sh
[hadoop@srv1 ~]$ scp ~/etc/hadoop/hadoop-env.sh srv2.1000cc.net:~/etc/hadoop/
[hadoop@srv1 ~]$ scp ~/etc/hadoop/hadoop-env.sh srv3.1000cc.net:~/etc/hadoop/
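# The sed call replaces the ${JAVA_HOME} placeholder in hadoop-env.sh with the
# concrete JDK path /usr/java/default. To verify the substitution landed on every node:
[hadoop@srv1 ~]$ grep -n '/usr/java/default' ~/etc/hadoop/hadoop-env.sh
[hadoop@srv1 ~]$ ssh srv2.1000cc.net "grep -n '/usr/java/default' ~/etc/hadoop/hadoop-env.sh"
[hadoop@srv1 ~]$ ssh srv3.1000cc.net "grep -n '/usr/java/default' ~/etc/hadoop/hadoop-env.sh"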
(4) Add the NameNode settings
[hadoop@srv1 ~]$ mkdir ~/namenode
[hadoop@srv1 ~]$ vim ~/etc/hadoop/hdfs-site.xml
# Add the dfs.namenode.name.dir property; the <configuration> section should now read:
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:///usr/hadoop/datanode</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:///usr/hadoop/namenode</value>
    </property>
</configuration>
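# A quick sanity check that HDFS picks up the directories:
[hadoop@srv1 ~]$ hdfs getconf -confKey dfs.namenode.name.dir
# should print file:///usr/hadoop/namenode
[hadoop@srv1 ~]$ hdfs getconf -confKey dfs.replication
# should print 2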
(5) Configure mapred-site
# Capture the classpath value
[hadoop@srv1 ~]$ yarn classpath
/usr/hadoop/etc/hadoop:/usr/hadoop/share/hadoop/common/lib/*:/usr/hadoop/share/hadoop/common/*:/usr/hadoop/share/hadoop/hdfs:
/usr/hadoop/share/hadoop/hdfs/lib/*:/usr/hadoop/share/hadoop/hdfs/*:/usr/hadoop/share/hadoop/mapreduce/lib/*:
/usr/hadoop/share/hadoop/mapreduce/*:/usr/hadoop/share/hadoop/yarn:/usr/hadoop/share/hadoop/yarn/lib/*:
/usr/hadoop/share/hadoop/yarn/*
[hadoop@srv1 ~]$ vim ~/etc/hadoop/mapred-site.xml
# Add the following inside the <configuration> section
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.application.classpath</name>
        <value>
            /usr/hadoop/etc/hadoop,
            /usr/hadoop/share/hadoop/common/lib/*,
            /usr/hadoop/share/hadoop/common/*,
            /usr/hadoop/share/hadoop/hdfs,
            /usr/hadoop/share/hadoop/hdfs/lib/*,
            /usr/hadoop/share/hadoop/hdfs/*,
            /usr/hadoop/share/hadoop/mapreduce/lib/*,
            /usr/hadoop/share/hadoop/mapreduce/*,
            /usr/hadoop/share/hadoop/yarn,
            /usr/hadoop/share/hadoop/yarn/lib/*,
            /usr/hadoop/share/hadoop/yarn/*
        </value>
    </property>
</configuration>
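The classpath list pasted into mapreduce.application.classpath above mirrors the yarn classpath output; note that yarn.app.mapreduce.am.env takes environment variable definitions, not a classpath. The Hadoop 3.x single-cluster documentation shows an env-based alternative that avoids pasting the classpath at all: export HADOOP_MAPRED_HOME into the AM and task environments. A sketch (the docs write ${HADOOP_HOME}; the concrete path is used here to match this guide):
    <!-- alternative to mapreduce.application.classpath, per the Hadoop 3.x docs -->
    <property>
        <name>yarn.app.mapreduce.am.env</name>
        <value>HADOOP_MAPRED_HOME=/usr/hadoop</value>
    </property>
    <property>
        <name>mapreduce.map.env</name>
        <value>HADOOP_MAPRED_HOME=/usr/hadoop</value>
    </property>
    <property>
        <name>mapreduce.reduce.env</name>
        <value>HADOOP_MAPRED_HOME=/usr/hadoop</value>
    </property>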
(6) Configure yarn-site
[hadoop@srv1 ~]$ vim ~/etc/hadoop/yarn-site.xml
# Delete the commented-out lines in the <configuration> section and add the following
<configuration>
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>srv1.1000cc.net</value>
    </property>
    <property>
        <name>yarn.nodemanager.hostname</name>
        <value>srv1.1000cc.net</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
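# So far mapred-site.xml, yarn-site.xml, and the updated hdfs-site.xml exist only on
# srv1. If srv2 and srv3 should run with identical configuration and no later step
# copies these files, the same scp pattern applies (a sketch):
[hadoop@srv1 ~]$ for f in hdfs-site.xml mapred-site.xml yarn-site.xml; do
> scp ~/etc/hadoop/$f srv2.1000cc.net:~/etc/hadoop/
> scp ~/etc/hadoop/$f srv3.1000cc.net:~/etc/hadoop/
> done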
(7) Add the workers file (Hadoop 3.x reads etc/hadoop/workers; the former slaves file is Hadoop 2.x)
[hadoop@srv1 ~]$ vim ~/etc/hadoop/workers
srv1.1000cc.net
srv2.1000cc.net
srv3.1000cc.net
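# With the workers file in place, start-dfs.sh run on srv1 starts the NameNode locally
# and a DataNode on each listed host over SSH (formatting and startup are assumed to
# follow in a later step). Once HDFS is running, a quick health check:
[hadoop@srv1 ~]$ hdfs dfsadmin -report | grep 'Live datanodes'
# should report 3 live datanodes, one per entry in workers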