Hadoop 2.5.0: Basic Installation and Configuration

[b]Environment:[/b]
OS: CentOS 6.5, 64-bit
JDK 1.7+
Hadoop 2.5.0
[b]Key configuration:[/b]
# hosts file configuration in /etc/hosts; the .163 host is the master node, the others are slave nodes
192.168.100.163 master
192.168.100.165 node1
192.168.100.166 node2
192.168.100.167 node3
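
The same hosts entries must be present on every node, not just the master. One way to push the file out (a sketch, assuming root SSH access; password prompts are expected until the SSH keys below are in place):

# for h in node1 node2 node3; do scp /etc/hosts root@$h:/etc/hosts; done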

[b]# SSH configuration[/b]
# 1. Generate an SSH key pair for the master node's user
# ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa

# 2. On the master itself, append ~/.ssh/id_dsa.pub to ~/.ssh/authorized_keys
# cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys

# 3. Test passwordless SSH to the local machine
# ssh localhost

# 4. Append ~/.ssh/id_dsa.pub to ~/.ssh/authorized_keys on each target machine
# scp ~/.ssh/id_dsa.pub root@node1:~/.ssh/
# ssh node1
# cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
Repeat step 4 for node2 and node3.
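
If the login still prompts for a password, sshd is usually rejecting the key because of loose permissions; tightening them on each node and then testing every hop from the master is a quick sanity check:

# chmod 700 ~/.ssh
# chmod 600 ~/.ssh/authorized_keys
# for h in node1 node2 node3; do ssh $h hostname; done

The loop should print each node's hostname without a single password prompt.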
# Hadoop and JDK installation steps are omitted here
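For reference, a minimal sketch of the Hadoop unpack step, assuming the hadoop-2.5.0.tar.gz release tarball and the /founder install root used below:

# mkdir -p /founder
# tar -xzf hadoop-2.5.0.tar.gz -C /founder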
# Hadoop configuration
[b]#1. hadoop-env.sh[/b]
# The java implementation to use.
export JAVA_HOME=/usr/java/jdk1.7.0_67

[b]#2. yarn-env.sh[/b]
export JAVA_HOME=/usr/java/jdk1.7.0_67
[b]#3. core-site.xml[/b]
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://master:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/founder/tmp</value>
    <description>A base for other temporary directories.</description>
  </property>
</configuration>
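
Hadoop will usually create the hadoop.tmp.dir directory itself, but since working data for HDFS and YARN lands under it on every node, it does no harm to create it up front everywhere (a sketch, assuming the SSH setup above):

# for h in master node1 node2 node3; do ssh $h mkdir -p /founder/tmp; done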
[b]#4. hdfs-site.xml[/b]
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>3</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:///home/hadoop/hdfs/name</value>
    <final>true</final>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:///home/hadoop/hdfs/data</value>
    <final>true</final>
  </property>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>master:9001</value>
  </property>
  <property>
    <name>dfs.webhdfs.enabled</name>
    <value>true</value>
  </property>
</configuration>
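
Creating the name and data directories before the first format/start avoids permission surprises. The name directory lives on the master, the data directory on each slave (a sketch):

# mkdir -p /home/hadoop/hdfs/name
# for h in node1 node2 node3; do ssh $h mkdir -p /home/hadoop/hdfs/data; done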

[b]#5. mapred-site.xml[/b]
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
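
Note that the 2.5.0 release ships only a template for this file; if etc/hadoop/mapred-site.xml does not exist yet, copy it from the template before editing:

# cd /founder/hadoop-2.5.0/etc/hadoop
# cp mapred-site.xml.template mapred-site.xml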

[b]#6. slaves[/b]
node1
node2
node3
[b]#7. yarn-site.xml[/b]
<configuration>
  <!-- Site specific YARN configuration properties -->
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>master</value>
    <description>The hostname of the RM.</description>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>
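
Once all seven files are edited on the master, the same configuration must be present on every slave. One way to sync it (a sketch, assuming Hadoop is unpacked at the same path on every node):

# for h in node1 node2 node3; do scp /founder/hadoop-2.5.0/etc/hadoop/* root@$h:/founder/hadoop-2.5.0/etc/hadoop/; done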


[b]# Hadoop environment variables (/etc/profile)[/b]

export HADOOP_DEV_HOME=/founder/hadoop-2.5.0
export PATH=$PATH:$HADOOP_DEV_HOME/bin
export PATH=$PATH:$HADOOP_DEV_HOME/sbin
export HADOOP_MAPRED_HOME=${HADOOP_DEV_HOME}
export HADOOP_COMMON_HOME=${HADOOP_DEV_HOME}
export HADOOP_HDFS_HOME=${HADOOP_DEV_HOME}
export YARN_HOME=${HADOOP_DEV_HOME}
export HADOOP_CONF_DIR=${HADOOP_DEV_HOME}/etc/hadoop
export HDFS_CONF_DIR=${HADOOP_DEV_HOME}/etc/hadoop
export YARN_CONF_DIR=${HADOOP_DEV_HOME}/etc/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_DEV_HOME}/lib/native
export JAVA_LIBRARY_PATH=${HADOOP_DEV_HOME}/lib/native

# Java settings
export JAVA_HOME=/usr/java/jdk1.7.0_67
export PATH=$JAVA_HOME/bin:$PATH


# Hadoop debug output on the console, handy for troubleshooting; a temporary environment variable is enough
export HADOOP_ROOT_LOGGER=DEBUG,console
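
With the configuration in place, the usual first-start sequence on the master looks like this; note that hdfs namenode -format wipes the name directory, so run it only once on a fresh cluster:

# source /etc/profile
# hdfs namenode -format
# start-dfs.sh
# start-yarn.sh
# jps

On the master, jps should list NameNode, SecondaryNameNode, and ResourceManager; on each slave node, DataNode and NodeManager.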

[b]Web UI[/b]
http://master:8088 (YARN ResourceManager; the HDFS NameNode UI defaults to http://master:50070)