1. 官网:http://hadoop.apache.org/
2. 下载版本hadoop 2.7.3(http://hadoop.apache.org/releases.html)
3. 预准备环境:
a. Centos7
b. Java8
4. 创建目录 software :mkdir /software
5. 上传hadoop到software
6. tar -xvf hadoop*(解压后得到目录 /software/hadoop-2.7.3)
7. vi core-site.xml(配置文件位于 Hadoop 解压目录的 etc/hadoop/ 下)
a. 在configuration标签中添加以下配置:
<property>
<name>fs.defaultFS</name>
<value>hdfs://192.168.70.111:9000</value>
<description></description>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/software/hadoop-2.7.3/tmp</value>
<description></description>
</property>
8. vi hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
<description>数据冗余度</description>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
<description></description>
</property>
</configuration>
9. vi mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
<description></description>
</property>
</configuration>
10. vi yarn-site.xml
<configuration>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>centos111</value>
<description></description>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
<description></description>
</property>
</configuration>
11. vi ~/.bash_profile(注意=左右不可以有空格)
HADOOP_HOME=/software/hadoop-2.7.3
export HADOOP_HOME
PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
export PATH
12. source ~/.bash_profile
13. 格式化namenode
a. hdfs namenode -format
14. 执行start-all.sh(执行前需先在 etc/hadoop/hadoop-env.sh 中设置 JAVA_HOME,并配置本机 ssh 免密登录)
15. 开放端口:firewall-cmd --zone=public --add-port=0-65535/tcp --permanent
firewall-cmd --reload
2. 下载版本hadoop 2.7.3(http://hadoop.apache.org/releases.html)
3. 预准备环境:
a. Centos7
b. Java8
4. 创建目录 software :mkdir /software
5. 上传hadoop到software
6. tar -xvf hadoop*(解压后得到目录 /software/hadoop-2.7.3)
7. vi core-site.xml(配置文件位于 Hadoop 解压目录的 etc/hadoop/ 下)
a. 在configuration标签中添加以下配置:
<property>
<name>fs.defaultFS</name>
<value>hdfs://192.168.70.111:9000</value>
<description></description>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/software/hadoop-2.7.3/tmp</value>
<description></description>
</property>
8. vi hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
<description>数据冗余度</description>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
<description></description>
</property>
</configuration>
9. vi mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
<description></description>
</property>
</configuration>
10. vi yarn-site.xml
<configuration>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>centos111</value>
<description></description>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
<description></description>
</property>
</configuration>
11. vi ~/.bash_profile(注意=左右不可以有空格)
HADOOP_HOME=/software/hadoop-2.7.3
export HADOOP_HOME
PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
export PATH
12. source ~/.bash_profile
13. 格式化namenode
a. hdfs namenode -format
14. 执行start-all.sh(执行前需先在 etc/hadoop/hadoop-env.sh 中设置 JAVA_HOME,并配置本机 ssh 免密登录)
15. 开放端口:firewall-cmd --zone=public --add-port=0-65535/tcp --permanent
firewall-cmd --reload