哔哩哔哩:https://www.bilibili.com/video/av71702657/(视频辅助)
关闭防火墙（以下为 CentOS 6 写法；CentOS 7 及以上请改用 systemctl stop firewalld 和 systemctl disable firewalld）
service iptables stop
chkconfig iptables off
映射(/etc/hosts)
127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
::1 localhost localhost.localdomain localhost6 localhost6.localdomain6
192.168.182.99 master
192.168.182.98 slave1
192.168.182.97 slave2
免密操作（在 master 上执行；公钥需要拷贝到 master、slave1、slave2 三台机器，否则后面的 scp 和 start-dfs.sh 会反复提示输入密码）
ssh-keygen -t rsa
ssh-copy-id master
ssh-copy-id slave1
ssh-copy-id slave2
配置环境变量(/etc/profile)
export JAVA_HOME=/root/software/jdk1.8.0_181
export PATH=$PATH:$JAVA_HOME/bin
export HADOOP_HOME=/root/software/hadoop-2.7.5
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
刷新环境配置
source /etc/profile
hadoop-env.sh
export JAVA_HOME=/root/software/jdk1.8.0_181
core-site.xml
<property>
<name>fs.defaultFS</name>
<value>hdfs://master:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/root/software/hadoop_tmp</value>
</property>
重新拷贝一份mapred-site.xml
cp mapred-site.xml.template mapred-site.xml
mapred-site.xml
<!-- 通知框架MR使用YARN -->
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
yarn-site.xml
<!-- NodeManager上运行的附属服务。需配置成mapreduce_shuffle,才可运行MapReduce程序 -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
slaves
slave1
slave2
创建文件夹(另外两台机器上，在 /root 目录下执行，与 scp 的目标路径 /root/software/ 对应)
mkdir software
发送文件给其他节点
scp -r hadoop-2.7.5/ slave1:/root/software/
scp -r hadoop-2.7.5/ slave2:/root/software/
scp -r jdk1.8.0_181/ slave1:/root/software/
scp -r jdk1.8.0_181/ slave2:/root/software/
scp /etc/profile slave1:/etc/
scp /etc/profile slave2:/etc/
刷新另外两台的环境
source /etc/profile
格式化 NameNode（仅在 master 上首次启动前执行一次；hadoop namenode -format 在 Hadoop 2.x 中已标记为过时，推荐使用 hdfs namenode -format）
hadoop namenode -format
启动HDFS
start-dfs.sh
启动Yarn
start-yarn.sh
jps结果
master
2129 SecondaryNameNode
2530 Jps
1948 NameNode
2285 ResourceManager
slave1
1744 NodeManager
1841 Jps
1634 DataNode
slave2
1705 NodeManager
1803 Jps
1598 DataNode