# Hadoop 2.7.3 pseudo-distributed setup notes
# OS: CentOS 6.5
# Set JDK environment variables: append the lines below to /etc/profile
# (the original used "--" for comments, which is not shell comment syntax)
JAVA_HOME=/opt/jdk1.7.0_25
JRE_HOME=/opt/jdk1.7.0_25/jre
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export JAVA_HOME JRE_HOME CLASS_PATH PATH
source /etc/profile  # apply the changes to the current shell
java -version        # verify the JDK is on the PATH
# Give the hadoop user ownership of the install tree
chown -R hadoopa:hadoopa /opt/server
chmod -R 755 /opt/server
# CentOS: change the hostname and the hosts file
# set HOSTNAME=hadoopa in this file so the name persists across reboots
vim /etc/sysconfig/network
hostname hadoopa # takes effect immediately (for the current boot only)
vi /etc/hosts
# line to add to /etc/hosts — maps the machine's IP to the new hostname:
192.168.61.139 hadoopa
# Set up passwordless SSH to localhost (required by the Hadoop start scripts)
rpm -qa|grep ssh # check that SSH client and SSH server packages are installed
ssh localhost
cd ~/.ssh/ # if this directory does not exist, run `ssh localhost` once first
ssh-keygen -t rsa # prompts appear; press Enter at each one to accept defaults
cat id_rsa.pub >> authorized_keys # authorize our own public key
chmod 600 ./authorized_keys # sshd refuses keys in group/world-writable files
exit # leave the ssh session opened above
# Set Hadoop environment variables: append the lines below to /etc/profile
# (fixed: "--" is not shell comment syntax; also export PATH alongside
# HADOOP_HOME, matching the JDK export style used earlier in these notes)
HADOOP_HOME=/opt/server/hadoop-2.7.3
PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_HOME PATH
# Point Hadoop's own launcher scripts at the JDK
cd /opt/server/hadoop-2.7.3/etc/hadoop
vi hadoop-env.sh
# add/replace this line inside hadoop-env.sh:
export JAVA_HOME=/opt/jdk1.7.0_25
vi yarn-env.sh
# add/replace this line inside yarn-env.sh:
export JAVA_HOME=/opt/jdk1.7.0_25
#core-site.xml
<configuration>
<!-- Base directory for Hadoop's temporary/working files -->
<property>
<name>hadoop.tmp.dir</name>
<value>file:/opt/server/hadoop-2.7.3/data/tmp</value>
</property>
<!-- Default filesystem URI: single-node HDFS listening on port 9000 -->
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
#hdfs-site.xml
<configuration>
<!-- Single-node cluster: keep only one copy of each block -->
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<!-- Where the NameNode stores the filesystem metadata -->
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/opt/server/hadoop-2.7.3/data/tmp/dfs/name</value>
</property>
<!-- Where the DataNode stores the actual HDFS blocks -->
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/opt/server/hadoop-2.7.3/data/tmp/dfs/data</value>
</property>
</configuration>
# mapred-site.xml ships only as a template; create the real file first
cp mapred-site.xml.template mapred-site.xml
#mapred-site.xml
<configuration>
<!-- Run MapReduce jobs on YARN instead of the legacy local/classic runtime -->
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
#yarn-site.xml
<configuration>
<!-- Enable the shuffle service NodeManagers need to serve map outputs -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
cd /opt/server/hadoop-2.7.3/bin
hdfs namenode -format  # format the NameNode (first run only — wipes HDFS metadata)
cd /opt/server/hadoop-2.7.3/sbin
# start-all.sh is deprecated in Hadoop 2.x; start HDFS and YARN separately
./start-dfs.sh
./start-yarn.sh
jps            # verify NameNode/DataNode/SecondaryNameNode/ResourceManager/NodeManager are up
netstat -tnlp  # verify the listening ports (9000, 50070, 8088, ...)
# Start daemons one at a time.
# Fixed: hadoop-daemons.sh (plural) runs on every host in the slaves file,
# not on the local node — use the singular hadoop-daemon.sh here.
# Fixed: jobtracker/tasktracker are Hadoop 1.x daemons and do not exist in
# Hadoop 2.7.3; YARN's resourcemanager/nodemanager replace them.
hadoop-daemon.sh start namenode
hadoop-daemon.sh start datanode
hadoop-daemon.sh start secondarynamenode
yarn-daemon.sh start resourcemanager
yarn-daemon.sh start nodemanager
# Stop daemons (reverse of the start order)
yarn-daemon.sh stop nodemanager
yarn-daemon.sh stop resourcemanager
hadoop-daemon.sh stop secondarynamenode
hadoop-daemon.sh stop datanode
hadoop-daemon.sh stop namenode
# Web UIs — open these in a browser; they are not shell commands
# (fixed: the bare URLs would be executed and fail if this file were sourced)
# http://192.168.61.139:50070   HDFS NameNode
# http://192.168.61.139:8088/   YARN ResourceManager