1. 下载jdk和hadoop
下载jdk-7u7-linux-i586.tar.gz和hadoop-2.6.0.tar.gz
http://www.apache.org/dyn/closer.cgi/hadoop/common/
http://www.oracle.com/technetwork/cn/java/javase/downloads/jdk7-downloads-1880260.html
sudo tar zxvf jdk-7u7-linux-i586.tar.gz -C /home
sudo tar zxvf hadoop-2.6.0.tar.gz -C /home/hadoop001
2. 配置环境变量
sudo gedit /etc/profile
把jdk和hadoop路径加入到profile文件
export JAVA_HOME=/home/jdk1.7.0_07
export HADOOP_HOME=/home/hadoop001/hadoop-2.6.0
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
source /etc/profile
java -version
hadoop version
3. 配置hadoop
/home/hadoop001/hadoop-2.6.0/etc/hadoop/hadoop-env.sh
#export JAVA_HOME=${JAVA_HOME}
export JAVA_HOME=/home/jdk1.7.0_07
/home/hadoop001/hadoop-2.6.0/etc/hadoop/yarn-env.sh
export JAVA_HOME=/home/jdk1.7.0_07
/home/hadoop001/hadoop-2.6.0/etc/hadoop/core-site.xml
<configuration>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/home/hadoop001/hadoop2data/tmp</value>
    <description></description>
  </property>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.1.242:9000</value>
  </property>
  <property>
    <name>io.file.buffer.size</name>
    <value>4096</value>
  </property>
</configuration>
/home/hadoop001/hadoop-2.6.0/etc/hadoop/mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
    <final>true</final>
  </property>
  <property>
    <name>mapreduce.jobhistory.address</name>
    <value>192.168.1.242:10020</value>
  </property>
  <property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>192.168.1.242:19888</value>
  </property>
</configuration>
/home/hadoop001/hadoop-2.6.0/etc/hadoop/hdfs-site.xml
<configuration>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/home/hadoop001/hadoop2data/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop001/hadoop2data/data</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
    <description>注意:副本数不能大于集群节点数,单机环境填1即可</description>
  </property>
  <property>
    <name>dfs.nameservices</name>
    <value>hadoop-cluster1</value>
  </property>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>192.168.1.242:50090</value>
  </property>
  <property>
    <name>dfs.webhdfs.enabled</name>
    <value>true</value>
  </property>
</configuration>
/home/hadoop001/hadoop-2.6.0/etc/hadoop/yarn-site.xml
<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>192.168.1.242</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.resourcemanager.address</name>
    <value>192.168.1.242:8032</value>
  </property>
  <property>
    <name>yarn.resourcemanager.scheduler.address</name>
    <value>192.168.1.242:8030</value>
  </property>
  <property>
    <name>yarn.resourcemanager.resource-tracker.address</name>
    <value>192.168.1.242:8031</value>
  </property>
  <property>
    <name>yarn.resourcemanager.admin.address</name>
    <value>192.168.1.242:8033</value>
  </property>
  <property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>192.168.1.242:8088</value>
  </property>
  <property>
    <name>yarn.log.dir</name>
    <value>/home/hadoop001/hadoop2data/yarn_log</value>
  </property>
</configuration>
hdfs namenode -format
hadoop namenode -format
start-all.sh
stop-all.sh
4. 遇到的问题
我碰到这个问题是因为没有使用命令格式化文件系统,使用: hadoop namenode -format 格式化后就可以访问了
/lib/ld-linux.so.2: bad ELF interpreter No such file or directory解决
是因为在64位系统中安装了32位程序,缺少32位的运行库
解决方法: yum install glibc.i686
yum出错Error: Cannot find a valid baseurl for repo: base
sudo gedit /etc/resolv.conf
在文件最后配置: nameserver 8.8.8.8
chmod 777 /etc/sudoers
在root用户下面添加你的用户的权限
root ALL=(ALL) ALL
你的用户名 ALL=(ALL) ALL
chmod 440 /etc/sudoers