【hadoop】【01】HDFS安装

1.安装jdk

https://blog.csdn.net/huiyanshizhen21/article/details/100176217
[root@master ~]# echo $JAVA_HOME
/usr/local/java/jdk1.8.0_211

2.下载hadoop

下载
cd /usr/local/src && wget https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/core/hadoop-2.9.2/hadoop-2.9.2.tar.gz
解压到/usr/local
tar -zxvf hadoop-2.9.2.tar.gz -C /usr/local && cd /usr/local && ll

3.配置java环境变量hadoop-env.sh

[root@master ~]# echo $JAVA_HOME
/usr/local/java/jdk1.8.0_211
vim /usr/local/hadoop-2.9.2/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/local/java/jdk1.8.0_211

4.配置fs core-site.xml

创建hadoop临时目录
mkdir -p /usr/local/hadoop-2.9.2/data/tmp
vim /usr/local/hadoop-2.9.2/etc/hadoop/core-site.xml
<configuration>
    <!-- Default filesystem URI: NameNode RPC endpoint that clients and DataNodes connect to. -->
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://master:9000</value>
    </property>
    <!-- Base for Hadoop's temporary/working directories (must exist; created above with mkdir -p). -->
        <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/usr/local/hadoop-2.9.2/data/tmp</value>
    </property>
	 <!-- Trash retention in minutes: deleted files are kept for 1440 min (24 h) before being purged. -->
	 <property>
        <name>fs.trash.interval</name>
        <value>1440</value>
    </property>
</configuration>
    

5.配置hdfs-site.xml

挂载硬盘
https://blog.csdn.net/huiyanshizhen21/article/details/104888121
vim /usr/local/hadoop-2.9.2/etc/hadoop/hdfs-site.xml
<configuration>
	<!-- Local directory where the NameNode stores fsimage/edits metadata (on the mounted disk). -->
	<property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/mnt/hdfsdata/name</value>
    </property>
    <!-- Local directory where each DataNode stores HDFS block data. -->
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/mnt/hdfsdata/data</value>
    </property>
    <!-- Block replication factor; 1 copy only — acceptable for a test cluster, no redundancy. -->
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
	
	<!-- NOTE(review): disables HDFS permission checking — convenient for testing,
	     but any user can read/write any path; do not use in production. -->
	<property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>

</configuration>

6.配置slaves文件

vim /usr/local/hadoop-2.9.2/etc/hadoop/slaves
slave1
slave2

7.免密登录

master免密登录master、slave1、slave2

ssh-keygen
#4个回车
#将master机器上的公钥拷贝到master、slave1、slave2上
ssh-copy-id -i ~/.ssh/id_rsa.pub root@master
ssh-copy-id -i ~/.ssh/id_rsa.pub root@slave1
ssh-copy-id -i ~/.ssh/id_rsa.pub root@slave2

8.将master的hadoop拷贝到slave1、slave2

scp -r  /usr/local/hadoop-2.9.2 slave1:/usr/local
scp -r  /usr/local/hadoop-2.9.2 slave2:/usr/local

9.设置hadoop环境变量

vim /etc/profile
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin

source /etc/profile

10.格式化dfs

/usr/local/hadoop-2.9.2/bin/hdfs namenode -format

11.启动dfs

/usr/local/hadoop-2.9.2/sbin/start-dfs.sh

12.访问

http://master:50070/

13.开机启动

13.1新建脚本

vim /etc/rc.d/init.d/hdfs

#!/bin/bash
#chkconfig:2345 20 90
#description:hdfs
#processname:hdfs

# SysV init script for HDFS. The init environment does not source /etc/profile,
# so JAVA_HOME must be exported here for the Hadoop scripts to find the JDK.
export JAVA_HOME=/usr/local/java/jdk1.8.0_211

HADOOP_SBIN=/usr/local/hadoop-2.9.2/sbin

# Dispatch the requested service action.
# Arguments: $1 - "start" or "stop"
# Returns:   status of the Hadoop script, or 2 on bad usage.
hdfs_service() {
  # "$1" is quoted: an unset/empty argument must fall through to the
  # usage branch instead of causing a word-splitting surprise.
  case "$1" in
    start) su root "$HADOOP_SBIN/start-dfs.sh" ;;
    stop)  su root "$HADOOP_SBIN/stop-dfs.sh" ;;
    *)
      # Usage error: diagnostic to stderr, non-zero status for chkconfig/callers.
      echo "require start|stop" >&2
      return 2
      ;;
  esac
}

hdfs_service "$@"

13.2设置执行权限

cd /etc/rc.d/init.d/ && chmod +x hdfs 

13.3设置开机启动

chkconfig --add hdfs

13.4查看是否设置成功

chkconfig --list 

14.停止dfs

/usr/local/hadoop-2.9.2/sbin/stop-dfs.sh