Hadoop installation log


Configure the IP address

https://blog.csdn.net/xiaohxx/article/details/79686036

Set up passwordless SSH login

https://blog.csdn.net/xiaohxx/article/details/79687590

Install the JDK

https://blog.csdn.net/xiaohxx/article/details/79688212

--------------------Install Hadoop-----------------------------------

Copy the Hadoop tarball to the machine

mkdir /home/xhdata

cd /home/xhdata

Extract it

tar -zvxf hadoop-2.7.3.tar.gz

cd hadoop-2.7.3/etc/hadoop

----------------------------------------------------
vi core-site.xml
Locate the <configuration> element and add the properties below between its tags, so the file looks like this:
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>

<property>
<name>fs.default.name</name>
<value>hdfs://192.168.1.111:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/home/xhdata/hadoop-2.7.3/data/hadooptemp</value>
</property>

</configuration>


Save
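Note: fs.default.name still works in 2.7.3 but is the deprecated name for this property; the current name is fs.defaultFS, so an equivalent (optional) form of the first property would be:

<property>
<name>fs.defaultFS</name>
<value>hdfs://192.168.1.111:9000</value>
</property>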
--------------------------------------------------------------------------
vi hadoop-env.sh
Find the line export JAVA_HOME=****
Change it to: export JAVA_HOME=/home/xhdata/jdk1.7.0_79

Find the line export HADOOP_PID_DIR=*****
Change it to: export HADOOP_PID_DIR=/home/xhdata/hadoop-2.7.3/data/pid

Find the line export HADOOP_SECURE_DN_PID_DIR=******
Change it to: export HADOOP_SECURE_DN_PID_DIR=/home/xhdata/hadoop-2.7.3/data/pid

Save
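Optional sanity check — confirm the JAVA_HOME path is valid and pre-create the PID directory (paths taken from the settings above):

/home/xhdata/jdk1.7.0_79/bin/java -version
mkdir -p /home/xhdata/hadoop-2.7.3/data/pid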
--------------------------------------------------------------
vi hdfs-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>

<property>
<name>dfs.namenode.name.dir</name>
<value>/home/xhdata/hadoop-2.7.3/data/hadoop/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/home/xhdata/hadoop-2.7.3/data/hadoop/data</value>
</property>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>

</configuration>
Save
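dfs.replication=3 matches the three datanodes listed in slaves below. Pre-creating the storage directories is optional (the format and startup steps create them), but doing it up front makes permission problems easier to spot:

mkdir -p /home/xhdata/hadoop-2.7.3/data/hadoop/name
mkdir -p /home/xhdata/hadoop-2.7.3/data/hadoop/data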
---------------------------------------------------------------------------------------
cp mapred-site.xml.template mapred-site.xml
vi mapred-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>

<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
<description>Execution framework set to Hadoop YARN</description>
</property>

</configuration>
Save
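Optional: the MapReduce JobHistory server can be configured in the same file, assuming it will run on the master 192.168.1.111 (10020 and 19888 are the default ports); this is not required for the steps below:

<property>
<name>mapreduce.jobhistory.address</name>
<value>192.168.1.111:10020</value>
</property>
<property>
<name>mapreduce.jobhistory.webapp.address</name>
<value>192.168.1.111:19888</value>
</property>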
--------------------------------------------------------------------------
vi slaves

Replace the contents with:
192.168.1.111
192.168.1.112
192.168.1.113

Save
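A quick check that the passwordless login set up earlier works for every node in slaves:

for ip in 192.168.1.111 192.168.1.112 192.168.1.113; do ssh root@$ip hostname; done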
----------------------------------------------------
vi yarn-site.xml
<?xml version="1.0"?>

<configuration>

<property>
<name>yarn.nodemanager.address</name>
<value>192.168.1.119:8041</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>192.168.1.119</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>192.168.1.119:8030</value>
</property>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>192.168.1.119:8031</value>
</property>
<property>
<name>yarn.resourcemanager.address</name>
<value>192.168.1.119:8032</value>
</property>
<property>
<name>yarn.resourcemanager.admin.address</name>
<value>192.168.1.119:8033</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address</name>
<value>192.168.1.119:8088</value>
</property>

</configuration>
Save
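In Hadoop 2.x the yarn.resourcemanager.*.address properties all default to ${yarn.resourcemanager.hostname} plus their standard ports (8030/8031/8032/8033/8088), so a minimal sketch of the same ResourceManager settings, assuming the ResourceManager host really is 192.168.1.119, would be:

<property>
<name>yarn.resourcemanager.hostname</name>
<value>192.168.1.119</value>
</property>

Note that running MapReduce jobs on YARN usually also requires the shuffle aux service:

<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>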
-------------------System environment configuration--------------------------
scp /etc/profile root@192.168.1.112:/etc/profile
scp /etc/profile root@192.168.1.113:/etc/profile
ssh root@192.168.1.112 "source /etc/profile"
ssh root@192.168.1.113 "source /etc/profile"
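The contents of /etc/profile are not shown here; a sketch of the Hadoop-related lines typically pushed to the slaves (assumed paths, adjust to the actual file) is:

export JAVA_HOME=/home/xhdata/jdk1.7.0_79
export HADOOP_HOME=/home/xhdata/hadoop-2.7.3
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin

(The ssh "source /etc/profile" calls only affect those single sessions; new logins pick the file up automatically.)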
--------------------Copy files to the slave nodes------------------------------
ssh root@192.168.1.112 "rm -rf /home/xhdata/*"
ssh root@192.168.1.113 "rm -rf /home/xhdata/*"
ssh root@192.168.1.112 "mkdir -p /home/xhdata"
ssh root@192.168.1.113 "mkdir -p /home/xhdata"
scp -r /home/xhdata/* root@192.168.1.112:/home/xhdata/
scp -r /home/xhdata/* root@192.168.1.113:/home/xhdata/
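Optional check that the copy finished on each slave:

ssh root@192.168.1.112 "ls /home/xhdata"
ssh root@192.168.1.113 "ls /home/xhdata"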
--------------------------Format and start---------------------------------------------
cd /home/xhdata/hadoop-2.7.3/
bin/hdfs namenode -format
sbin/start-all.sh

sbin/stop-all.sh
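start-all.sh and stop-all.sh are marked deprecated in 2.x and simply call the HDFS and YARN scripts; the equivalent explicit form is:

sbin/start-dfs.sh
sbin/start-yarn.sh

sbin/stop-yarn.sh
sbin/stop-dfs.sh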

------------------------------------------------

jps — check the Hadoop daemons
On the master, 5 daemon processes should be running (with 192.168.1.111 also listed in slaves, these are typically NameNode, SecondaryNameNode, ResourceManager, DataNode and NodeManager).

ip:8088  — YARN ResourceManager web UI (cluster nodes)

ip:50070 — HDFS NameNode web UI (HDFS and datanode status)
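The cluster can also be checked from the command line (run from /home/xhdata/hadoop-2.7.3), e.g. to confirm that all three datanodes and nodemanagers registered:

bin/hdfs dfsadmin -report
bin/yarn node -list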


--------------------Reset whenever the configuration goes wrong--------------------------------

ssh root@192.168.1.112 "rm -rf /home/xhdata/*"
ssh root@192.168.1.113 "rm -rf /home/xhdata/*"

rm -rf /home/xhdata/hadoop-2.7.3/data/*
ssh root@192.168.1.112 "rm -rf /home/xhdata/hadoop-2.7.3/data/*"
ssh root@192.168.1.113 "rm -rf /home/xhdata/hadoop-2.7.3/data/*"
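After wiping the data directories, format the NameNode again before restarting (same commands as in the format-and-start step above):

cd /home/xhdata/hadoop-2.7.3/
bin/hdfs namenode -format
sbin/start-all.sh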
