解决centos6的yum仓库闭源问题
1.sed -i "s|enabled=1|enabled=0|g" /etc/yum/pluginconf.d/fastestmirror.conf
2.mv /etc/yum.repos.d/CentOS-Base.repo /etc/yum.repos.d/CentOS-Base.repo.backup
3.curl -o /etc/yum.repos.d/CentOS-Base.repo https://www.xmpan.com/Centos-6-Vault-Aliyun.repo
4.yum clean all
5.yum makecache
网卡问题处理
vim /etc/udev/rules.d/70-persistent-net.rules
删掉第一个,之后,将eth1改为eth0
vim /etc/sysconfig/network-scripts/ifcfg-eth0
删除物理地址（HWADDR）那一行
修改IP地址,并且网段要一致
#查看防火墙状态
service iptables status
#启动防火墙服务
service iptables start
#重启 防火墙 服务
service iptables restart
#彻底关闭防火墙（设置开机不启动；可用 chkconfig --list iptables 查看各运行级别状态）
chkconfig iptables off
# 关闭selinux
vim /etc/selinux/config    将 SELINUX=enforcing 改为 SELINUX=disabled（重启后生效）
# 修改主机名
vim /etc/sysconfig/network
# ip地址配置别名
vim /etc/hosts
192.168.42.130 hadoop01
192.168.42.131 hadoop02
192.168.42.132 hadoop03
#ssh 免密码登录
第一步:ssh-keygen -t rsa
第二步:ssh-copy-id hadoop01
第三步:ssh-copy-id hadoop02
第四步:ssh-copy-id hadoop03
# 三台机器时钟同步
ntpdate ntp4.aliyun.com
配置hadoop的环境
vi hadoop-env.sh
添加 java 的环境变量，也就是 JAVA_HOME
which java
vi core-site.xml
<property>
<name>fs.defaultFS</name>
<value>hdfs://hadoop01:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/export/hdfs-data/tmp</value>
</property>
vi hdfs-site.xml
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>192.168.42.132:50090</value>
</property>
配置节点
vi slaves
配置主机名
#scp远程命令
scp -r /export/install/hadoop-2.7.2/ root@hadoop02:/export/install/
scp -r /export/install/hadoop-2.7.2/ root@hadoop03:/export/install/
拷贝环境变量
scp -r /etc/profile root@hadoop02:/etc/
scp -r /etc/profile root@hadoop03:/etc/
#编写sh的命令以及脚本