一Centos本地源
[root@kolla yum.repos.d]# cat Centos.repo
[local_server]
name=This is a local repo
baseurl=http://192.168.229.130:8099/yumInstallPackage/
enabled=1
gpgcheck=0
[root@kolla yum.repos.d]#
二ambari本地源
[root@kolla yum.repos.d]# cat ambari.repo
[Updates-ambari-2.5.2.0]
name=ambari Version - Updates
baseurl=http://192.168.229.130:8099/ambari/centos7
gpgcheck=0
enabled=1
priority=1
三HDP本地源
[root@kolla yum.repos.d]# cat HDP.repo
[HDP-2.6]
name=HDP-2.6
baseurl=http://192.168.229.130:8099/HDP/
path=/
enabled=1
gpgcheck=0
四HDP-UTILS本地源
[root@kolla yum.repos.d]# cat HDP-UTILS.repo
[HDP-UTILS-1.1.0.21]
name=HDP-UTILS-1.1.0.21
baseurl=http://192.168.229.130:8099/HDP-UTILS/
path=/
enabled=1
gpgcheck=0
安装大数据集群
[root@kolla ~]# cat install_hadoop.sh
#!/usr/bin/env sh
# Install, configure and start the Ambari server from the local yum repos
# defined in /etc/yum.repos.d (Centos.repo / ambari.repo / HDP*.repo).
#
# Abort on the first failed step: without this, a failed install or setup
# would still be followed by an attempt to start the server.
set -e

yum install ambari-server -y

# NOTE(review): 'ambari-server setup' prompts interactively unless run with
# --silent; presumably the defaults are accepted by hand here — confirm.
ambari-server setup

ambari-server start
[root@kolla ~]# cat delete_ambar_cluster.sh
#!/usr/bin/env sh
# Tear down an Ambari-managed Hadoop cluster on this host: stop the Ambari
# server/agent, remove all HDP/Ambari packages, delete the stack's service
# accounts and their runtime/log/lib directories, then run Ambari's own
# HostCleanup. DESTRUCTIVE — intended only before a full cluster reinstall.

# Stop services first so files are not recreated while we delete them.
ambari-server stop
ambari-agent stop

rm -rf /etc/ambari-*

# Remove every HDP / Ambari stack package.
yum remove hadoop_2* hdp-select* ranger_2* zookeeper* bigtop* atlas-metadata* ambari* spark* mysql* perl* oozie* smartsense-hst -y
yum remove extjs* oozie_2_5_3_0_37-client* slider* snappy* storm* postgresql* -y

# Service accounts created by the stack; each also owns matching
# directories under /var/run, /tmp, /var/log, /usr/lib and /var/lib.
SERVER_NAME="
ambari accumulo ambari-qa atlas flume hbase hdfs infra-solr knox mahout oozie sqoop tez zeppelin
activity_analyzer ams falcon hadoop hcat hive kafka livy mapred spark storm yarn zookeeper
"
rm -rf /var/log/smartsense/*

# Word-splitting of the unquoted $SERVER_NAME is intentional: one user per
# whitespace-separated word. Inside the loop each expansion is quoted
# (SC2086) so a name can never split or glob into an unintended path; the
# trailing '*' stays outside the quotes to keep the original glob behavior.
for serverName in $SERVER_NAME
do
userdel "$serverName"
#rm -rf /home/"$serverName"
rm -rf -- "/var/run/$serverName"*
rm -rf -- "/tmp/$serverName"*
rm -rf -- "/var/log/$serverName"*
rm -rf -- "/usr/lib/$serverName"*
rm -rf -- "/var/lib/$serverName"*
done

# Leftover Ambari artefacts and data directories.
rm -rf /usr/lib/ambari*
rm -rf /var/lib/ambari*
# NOTE(review): presumably the embedded PostgreSQL instance used by Ambari.
rm -rf /var/lib/pgsql/data
rm -rf /usr/sbin/ambari_server_main.pyc
rm -rf /usr/hdp
rm -rf /opt/ambari*
rm -rf /kafka-logs

# Report any HDP packages still present (should print nothing on success).
yum list | grep @HDP

# Ambari's own host cleanup; --skip=users because the accounts were
# already removed in the loop above.
python /usr/lib/python2.6/site-packages/ambari_agent/HostCleanup.py --silent --skip=users
yum install lrzsz -y
[root@kolla ~]#