hadoop

[root@server1 ~]# useradd -u 800 hadoop
[root@server1 ~]# passwd hadoop
[root@server1 ~]# su - hadoop
[hadoop@server1 ~]$ ls
hadoop-2.7.3.tar.gz  jdk-7u79-linux-x64.tar.gz
[hadoop@server1 ~]$ tar zxf jdk-7u79-linux-x64.tar.gz 
[hadoop@server1 ~]$ ln -s jdk1.7.0_79/ java
[hadoop@server1 ~]$ tar zxf hadoop-2.7.3.tar.gz 
[hadoop@server1 ~]$ cd hadoop-2.7.3
[hadoop@server1 hadoop-2.7.3]$ cd etc/hadoop/
[hadoop@server1 hadoop]$ vim hadoop-env.sh 
 25 export JAVA_HOME=/home/hadoop/java
[hadoop@server1 hadoop]$ cd
[hadoop@server1 ~]$ vim .bash_profile
 10 PATH=$PATH:$HOME/bin:~/java/bin
[hadoop@server1 ~]$ source .bash_profile 
[hadoop@server1 ~]$ cd hadoop-2.7.3
[hadoop@server1 hadoop-2.7.3]$ mkdir input
[hadoop@server1 hadoop-2.7.3]$ cp etc/hadoop/*.xml input
[hadoop@server1 hadoop-2.7.3]$ bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.jar grep input output 'dfs[a-z.]+'
[hadoop@server1 hadoop-2.7.3]$ cd output/
[hadoop@server1 output]$ ls
part-r-00000  _SUCCESS
[hadoop@server1 output]$ cat part-r-00000 
1   dfsadmin
[hadoop@server1 ~]$ ln -s hadoop-2.7.3 hadoop
[hadoop@server1 ~]$ cd hadoop
[hadoop@server1 hadoop]$ cd etc/hadoop/
[hadoop@server1 hadoop]$ vim core-site.xml 
<property>
         <name>fs.defaultFS</name>
                 <value>hdfs://172.25.40.1:9000</value>
                         </property>

这里写图片描述

[hadoop@server1 hadoop]$ vim hdfs-site.xml 
<property>
         <name>dfs.replication</name>
                 <value>1</value>
                        </property>

这里写图片描述

[hadoop@server1 hadoop]$ vim slaves
172.25.40.1     #将localhost改为节点主机IP

生成密钥,做免密连接

[hadoop@server1 hadoop]$ ssh-keygen
[hadoop@server1 hadoop]$ cd
[hadoop@server1 ~]$ cd .ssh/
[hadoop@server1 .ssh]$ ls
id_rsa  id_rsa.pub
[hadoop@server1 .ssh]$ cp id_rsa.pub authorized_keys

格式化

[hadoop@server1 ~]$ cd hadoop
[hadoop@server1 hadoop]$ ls
bin  include  lib      LICENSE.txt  output      sbin
etc  input    libexec  NOTICE.txt   README.txt  share
[hadoop@server1 hadoop]$ bin/hdfs namenode -format

这里写图片描述
开启服务

[hadoop@server1 hadoop]$ sbin/start-dfs.sh
[hadoop@server1 hadoop]$ jps

这里写图片描述

网页访问http://172.25.40.1:50070
这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfs -mkdir /user
[hadoop@server1 hadoop]$ bin/hdfs dfs -mkdir /user/hadoop
[hadoop@server1 hadoop]$ bin/hdfs dfs -ls
[hadoop@server1 hadoop]$ bin/hdfs dfs -put input/
[hadoop@server1 hadoop]$ bin/hdfs dfs -ls
Found 1 items
drwxr-xr-x   - hadoop supergroup          0 2018-08-26 16:51 input

这里写图片描述

分布式

[hadoop@server1 hadoop]$ logout
[root@server1 ~]# yum install -y rpcbind-0.2.0-11.el6.x86_64
[root@server1 ~]# /etc/init.d/rpcbind start
[root@server1 ~]# vim /etc/exports
    /home/hadoop    *(rw,anonuid=800,anongid=800)
[root@server1 ~]# /etc/init.d/nfs start

这里写图片描述

[root@server1 ~]# showmount -e

这里写图片描述

在【server2】和【server3】上同时操作以下步骤,作为分布式节点

[root@server2 ~]# yum install -y nfs-utils
[root@server2 ~]# /etc/init.d/rpcbind start

需要建立与【server1】相同的用户

[root@server2 ~]# useradd -u 800 hadoop
[root@server2 ~]# id hadoop
uid=800(hadoop) gid=800(hadoop) groups=800(hadoop)

挂载到【server1】上

[root@server2 ~]# mount 172.25.40.1:/home/hadoop/ /home/hadoop/
[root@server2 ~]# ll -d /home/hadoop/
drwx------ 5 hadoop hadoop 4096 Aug 26 16:57 /home/hadoop/

登陆hadoop用户时可以看到server1在这个用户里的文件

[root@server2 ~]# su - hadoop
[hadoop@server2 ~]$ ls
hadoop        hadoop-2.7.3.tar.gz  jdk1.7.0_79
hadoop-2.7.3  java                 jdk-7u79-linux-x64.tar.gz

在【server1】上

[root@server1 ~]# su - hadoop
[hadoop@server1 ~]$ cd hadoop
[hadoop@server1 hadoop]$ cd etc/hadoop/
[hadoop@server1 hadoop]$ vim hdfs-site.xml 

这里写图片描述

[hadoop@server1 hadoop]$ vim slaves
[hadoop@server1 hadoop]$ cat slaves
172.25.40.2
172.25.40.3
[hadoop@server1 hadoop]$ cd /tmp/
[hadoop@server1 tmp]$ ls
[hadoop@server1 tmp]$ rm -fr *
[hadoop@server1 ~]$ cd hadoop
[hadoop@server1 hadoop]$ bin/hdfs namenode -format
[hadoop@server1 hadoop]$ sbin/start-dfs.sh 

这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfs -mkdir /user
[hadoop@server1 hadoop]$ bin/hdfs dfs -mkdir /user/hadoop
[hadoop@server1 hadoop]$ bin/hdfs dfs -put input
[hadoop@server1 hadoop]$ bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.jar wordcount input output

这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfs -ls output/
Found 2 items
-rw-r--r--   2 hadoop supergroup          0 2018-08-26 17:02 output/_SUCCESS
-rw-r--r--   2 hadoop supergroup       9984 2018-08-26 17:02 output/part-r-00000
[hadoop@server1 hadoop]$ rm -fr output/

这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfs -cat output/*

这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfs -get output
[hadoop@server1 hadoop]$ cd output/
[hadoop@server1 output]$ ls
[hadoop@server1 output]$ cat part-r-00000 

这里写图片描述

数据节点的添加和删除:
首先查看节点:
这里写图片描述
搭建server4:

[root@server4 ~]# yum install -y nfs-utils
[root@server4 ~]# /etc/init.d/rpcbind start
[root@server4 ~]# useradd -u 800 hadoop
[root@server4 ~]# id hadoop
uid=800(hadoop) gid=800(hadoop) groups=800(hadoop)
[root@server4 ~]# mount 172.25.40.1:/home/hadoop/ /home/hadoop/
[root@server4 ~]# df

这里写图片描述

[hadoop@server4 hadoop]$ vim slaves 
[hadoop@server4 hadoop]$ cat slaves 
172.25.40.2
172.25.40.3
172.25.40.4
[hadoop@server4 hadoop]$ cd
[hadoop@server4 ~]$ cd hadoop
[hadoop@server4 hadoop]$ sbin/hadoop-daemon.sh start datanode
[hadoop@server4 hadoop]$ jps

这里写图片描述

再次查看节点信息:点击overview
这里写图片描述

[hadoop@server1 output]$ cd ..
[hadoop@server1 hadoop]$ dd if=/dev/zero of=bigfile bs=1M count=300
300+0 records in
300+0 records out
314572800 bytes (315 MB) copied, 1.65197 s, 190 MB/s

这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfs -put bigfile
[hadoop@server1 hadoop]$ cd etc/hadoop/
[hadoop@server1 hadoop]$ vim slaves 
[hadoop@server1 hadoop]$ cat slaves
172.25.40.2
172.25.40.4
[hadoop@server1 hadoop]$ vim hosts-exclude
[hadoop@server1 hadoop]$ cat hosts-exclude
172.25.40.3

这里写图片描述

[hadoop@server1 hadoop]$ vim hdfs-site.xml 
<property>
        <name>dfs.hosts.exclude</name>
                <value>/home/hadoop/hadoop/etc/hadoop/hosts-exclude</value>
                </property>

这里写图片描述

[hadoop@server1 hadoop]$ ll /home/hadoop/hadoop/etc/hadoop/hosts-exclude 
-rw-rw-r-- 1 hadoop hadoop 12 Aug 26 19:08 /home/hadoop/hadoop/etc/hadoop/hosts-exclude
[hadoop@server1 hadoop]$ cd ..
[hadoop@server1 etc]$ cd ..
[hadoop@server1 hadoop]$ bin/hdfs dfsadmin -refreshNodes

这里写图片描述

[hadoop@server1 hadoop]$ bin/hdfs dfsadmin -report

这里写图片描述

这里写图片描述

[hadoop@server3 ~]$ jps
2119 DataNode
2264 Jps
[hadoop@server3 ~]$  cd hadoop
[hadoop@server3 hadoop]$ sbin/hadoop-daemon.sh stop datanode
stopping datanode

这里写图片描述

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值