One-Click Installation of Standalone Hadoop Components via Shell Script

Place the downloaded installation packages under /opt/; the script then installs everything to /opt/soft/.
(Screenshot: the installation packages under /opt/)
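The complete script follows. A minimal way to run it, assuming it has been saved as install.sh (the filename is arbitrary) and the tarballs referenced below are already under /opt/. Note that the ZooKeeper section writes the first script argument ($1) into zoo.cfg as the server hostname:

chmod +x install.sh
./install.sh hadoop02    # pass the hostname used elsewhere in the script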

#!/bin/bash
echo 'full system install beginning....'

# global flags: set a component to true to install it
jdk=true
mysql=false
hadoop=true
hive=true
zookeeper=true
hbase=true
sqoop=true
spark=true
flume=false
kafka=false
flink=false

#rpm -qa | grep java 
#rpm -e --nodeps ...

#setup jdk 1.8.0_111
if [ "$jdk" = true ];then
	mkdir -p /opt/soft
	echo 'setup jdk 1.8.0_111'
	tar -zxf /opt/jdk-8u111-linux-x64.tar.gz -C /opt/soft
	mv /opt/soft/jdk1.8.0_111 /opt/soft/jdk180
	echo '#jdk' >> /etc/profile
	echo 'export JAVA_HOME=/opt/soft/jdk180' >> /etc/profile
	echo 'export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar' >> /etc/profile
	echo 'export PATH=$JAVA_HOME/bin:$PATH' >> /etc/profile
fi

#setup mysql (the el7-5 release RPM enables the MySQL 5.6 community repo by default)
if [ "$mysql" = true ];then
	echo 'setup mysql community server'
	var1=$(rpm -qa | grep mariadb)
	yum remove -y $var1
	cd /opt/
	wget  http://repo.mysql.com/mysql-community-release-el7-5.noarch.rpm
	rpm  -ivh mysql-community-release-el7-5.noarch.rpm
	yum -y install mysql-server
	chown -R root:root /var/lib/mysql
	chown root /var/lib/mysql
	systemctl restart mysqld
	mysql -h localhost -P3306 -uroot  mysql  --default-character-set=utf8 -e "use mysql;update user set password=password('root') where user='root';grant all on *.* to root@'%' identified by 'root';flush privileges;"
	sed -i '22a\character-set-server=utf8' /etc/my.cnf
	systemctl restart mysqld
fi
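# a quick manual check for the MySQL block, assuming the root password was
# set to 'root' as above:
#   mysql -uroot -proot -e "show databases;"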

#setup hadoop260
if [ "$hadoop" = true ];then
	echo 'setup hadoop cdh 5.14.2 - hadoop2.6'
	tar -zxf /opt/hadoop-2.6.0-cdh5.14.2.tar.gz -C /opt/soft
	mv /opt/soft/hadoop-2.6.0-cdh5.14.2 /opt/soft/hadoop260
	
	echo "-----------hadoop-env.sh-------------"
	echo "export JAVA_HOME=/opt/soft/jdk180" >> /opt/soft/hadoop260/etc/hadoop/hadoop-env.sh
	
	echo "-----------core-site.xml-------------"
	val1="/opt/soft/hadoop260/etc/hadoop/core-site.xml"
	sed -i '19a\<property><name>hadoop.proxyuser.root.groups</name><value>*</value></property>' $val1
	sed -i '19a\<property><name>hadoop.proxyuser.root.hosts</name><value>*</value></property>' $val1
	sed -i '19a\<property><name>hadoop.tmp.dir</name><value>/opt/soft/hadoop260/tmp</value></property>' $val1
	sed -i "19a\<property><name>fs.defaultFS</name><value>hdfs://hadoop02:9000</value></property>" $val1
	
	echo "-----------hdfs-site.xml-------------"
	val2="/opt/soft/hadoop260/etc/hadoop/hdfs-site.xml"
	sed -i '19a\<property><name>dfs.permissions</name><value>false</value></property>' $val2
	sed -i '19a\<property><name>dfs.replication</name><value>1</value></property>' $val2
	
	echo "-----------mapred-site.xml-------------"
	cp /opt/soft/hadoop260/etc/hadoop/mapred-site.xml.template /opt/soft/hadoop260/etc/hadoop/mapred-site.xml
	val3="/opt/soft/hadoop260/etc/hadoop/mapred-site.xml"
	sed -i '19a\<property><name>mapreduce.framework.name</name><value>yarn</value></property>' $val3
	
	echo "-----------yarn-site.xml-------------"
	val4="/opt/soft/hadoop260/etc/hadoop/yarn-site.xml"
	sed -i '15a\<property><name>yarn.nodemanager.aux-services</name><value>mapreduce_shuffle</value></property>' $val4
	sed -i '15a\<property><name>yarn.resourcemanager.hostname</name><value>localhost</value></property>' $val4
	
	sed -i 's/localhost/hadoop02/g' /opt/soft/hadoop260/etc/hadoop/slaves
	echo '#hadoop' >> /etc/profile
	echo 'export HADOOP_HOME=/opt/soft/hadoop260' >> /etc/profile
	echo 'export HADOOP_MAPRED_HOME=$HADOOP_HOME' >> /etc/profile
	echo 'export HADOOP_COMMON_HOME=$HADOOP_HOME' >> /etc/profile
	echo 'export HADOOP_HDFS_HOME=$HADOOP_HOME' >> /etc/profile
	echo 'export YARN_HOME=$HADOOP_HOME' >> /etc/profile
	echo 'export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native' >> /etc/profile
	echo 'export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin' >> /etc/profile
	echo 'export HADOOP_INSTALL=$HADOOP_HOME' >> /etc/profile
	
	source /etc/profile
	cd /opt/soft/hadoop260/bin
	hdfs namenode -format
	tar -xf /opt/hadoop-native-64-2.6.0.tar -C /opt/soft/hadoop260/lib/native/
fi
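# after the whole script finishes, HDFS and YARN can be brought up and checked:
#   start-dfs.sh && start-yarn.sh
#   jps   # expect NameNode, DataNode, SecondaryNameNode, ResourceManager, NodeManager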



#setup hive
if [ "$hive" = true ];then
	echo 'setup hive110'
	tar -zxf /opt/hive-1.1.0-cdh5.14.2.tar.gz -C  /opt/soft/
	mv /opt/soft/hive-1.1.0-cdh5.14.2/ /opt/soft/hive110
	cp /opt/mysql-connector-java-5.1.38.jar /opt/soft/hive110/lib/
	cd /opt/soft/hive110/conf
	echo "配置hive-site.xml文件"
	touch hive-site.xml
	echo "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
	<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>
	<configuration>
		<property>
			<name>hive.metastore.warehouse.dir</name>
			<value>/hive110/warehouse</value>
		</property>
		<property>
			<name>hive.metastore.local</name>
			<value>true</value>
		</property>
		<property>
			<name>javax.jdo.option.ConnectionURL</name>
			<value>jdbc:mysql://hadoop02:3306/hive110?createDatabaseIfNotExist=true</value>
		</property>
		<property>
			<name>javax.jdo.option.ConnectionDriverName</name>
			<value>com.mysql.jdbc.Driver</value>
		</property>
		<property>
			<name>javax.jdo.option.ConnectionUserName</name>
			<value>root</value>
		</property>
		<property>
			<name>javax.jdo.option.ConnectionPassword</name>
			<value>root</value>
		</property>
		<property>
			<name>hive.server2.authentication</name>
			<value>NONE</value>
		</property>
		<property>
			<name>hive.server2.thrift.client.user</name>
			<value>root</value>
		</property>
		<property>
			<name>hive.server2.thrift.client.password</name>
			<value>root</value>
		</property>
	</configuration>" >  /opt/soft/hive110/conf/hive-site.xml

	echo "#hive" >> /etc/profile
	echo 'export HIVE_HOME=/opt/soft/hive110' >> /etc/profile
	echo 'export PATH=$PATH:$HIVE_HOME/bin' >> /etc/profile
	source /etc/profile
	schematool -dbType mysql -initSchema
	cd /opt
fi
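# a quick Hive check once HDFS and the MySQL metastore are up; HiveServer2
# listens on port 10000 by default:
#   nohup hive --service hiveserver2 > /dev/null 2>&1 &
#   beeline -u jdbc:hive2://hadoop02:10000 -n root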

#setup zookeeper
if [ "$zookeeper" = true ];then
	echo 'setup zookeeper'
	tar -zxf /opt/zookeeper-3.4.5-cdh5.14.2.tar.gz -C /opt/soft/
	mv /opt/soft/zookeeper-3.4.5-cdh5.14.2 /opt/soft/zk345
	cp /opt/soft/zk345/conf/zoo_sample.cfg /opt/soft/zk345/conf/zoo.cfg
	mkdir -p /opt/soft/zk345/datas
	sed -i '12c dataDir=/opt/soft/zk345/datas' /opt/soft/zk345/conf/zoo.cfg 
	echo "server.0=$1:2287:3387" >> /opt/soft/zk345/conf/zoo.cfg 
	echo '#zookeeper' >> /etc/profile
	echo 'export ZOOKEEPER_HOME=/opt/soft/zk345' >> /etc/profile
	echo 'export PATH=$PATH:$ZOOKEEPER_HOME/bin' >> /etc/profile
fi
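# ZooKeeper can be started and checked after re-login (or 'source /etc/profile'):
#   zkServer.sh start
#   zkServer.sh status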

#setup hbase

if [ "$hbase" = true ];then
	echo 'setup hbase'
	tar -zxf /opt/hbase-1.2.0-cdh5.14.2.tar.gz -C /opt/soft/
	mv /opt/soft/hbase-1.2.0-cdh5.14.2/ /opt/soft/hbase120
	echo "export JAVA_HOME=/opt/soft/jdk180" >> /opt/soft/hbase120/conf/hbase-env.sh
	echo "export HBASE_MANAGES_ZK=false" >> /opt/soft/hbase120/conf/hbase-env.sh

	val5="/opt/soft/hbase120/conf/hbase-site.xml"
	sed -i "23a\<property><name>hbase.zookeeper.property.clientPort</name><value>2181</value></property>" $val5
	sed -i "23a\<property><name>hbase.zookeeper.property.dataDir</name><value>/opt/soft/zk345/datas</value></property>" $val5
	sed -i "23a\<property><name>hbase.cluster.distributed</name><value>true</value></property>" $val5
	sed -i "23a\<property><name>hbase.rootdir</name><value>hdfs://hadoop02:9000/hbase</value></property>" $val5
	echo "#hbase" >> /etc/profile
	echo 'export HBASE_HOME=/opt/soft/hbase120' >> /etc/profile
	echo 'export PATH=$HBASE_HOME/bin:$HBASE_HOME/sbin:$PATH' >> /etc/profile
	source /etc/profile
fi
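# HBase needs HDFS and ZooKeeper running first; once they are up:
#   start-hbase.sh
#   hbase shell      # then 'list' should return without errors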

#setup sqoop

if [ "$sqoop" = true ];then
	echo 'setup sqoop'
	tar -zxf /opt/sqoop-1.4.6-cdh5.14.2.tar.gz -C /opt/soft/
	mv /opt/soft/sqoop-1.4.6-cdh5.14.2/ /opt/soft/sqoop146
	cp /opt/soft/sqoop146/conf/sqoop-env-template.sh /opt/soft/sqoop146/conf/sqoop-env.sh
	echo 'export HADOOP_COMMON_HOME=/opt/soft/hadoop260' >> /opt/soft/sqoop146/conf/sqoop-env.sh
	echo 'export HADOOP_MAPRED_HOME=/opt/soft/hadoop260' >> /opt/soft/sqoop146/conf/sqoop-env.sh
	echo 'export HIVE_HOME=/opt/soft/hive110' >> /opt/soft/sqoop146/conf/sqoop-env.sh
	echo 'export HBASE_HOME=/opt/soft/hbase120' >> /opt/soft/sqoop146/conf/sqoop-env.sh
	echo 'export ZOOCFGDIR=/opt/soft/zk345/conf' >> /opt/soft/sqoop146/conf/sqoop-env.sh
	echo 'export ZOOKEEPER_HOME=/opt/soft/zk345' >> /opt/soft/sqoop146/conf/sqoop-env.sh
	cp /opt/mysql-connector-java-5.1.38.jar /opt/soft/sqoop146/lib/
	cp /opt/java-json.jar /opt/soft/sqoop146/lib/
	cp /opt/soft/hive110/lib/hive-common-1.1.0-cdh5.14.2.jar /opt/soft/sqoop146/lib/
	cp /opt/soft/hive110/lib/hive-jdbc-1.1.0-cdh5.14.2-standalone.jar /opt/soft/sqoop146/lib/
	echo '#sqoop' >> /etc/profile
	echo 'export SQOOP_HOME=/opt/soft/sqoop146' >> /etc/profile
	echo 'export PATH=$PATH:$SQOOP_HOME/bin' >> /etc/profile
fi
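# a simple Sqoop connectivity test, assuming MySQL is installed and reachable
# with the root/root credentials configured above:
#   sqoop list-databases --connect jdbc:mysql://hadoop02:3306 --username root --password root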

#setup spark 
if [ "$spark" = true ];then
	echo 'setup spark'
	tar -zxf /opt/spark-2.3.4-bin-hadoop2.6.tgz -C /opt/soft/
	mv /opt/soft/spark-2.3.4-bin-hadoop2.6 /opt/soft/spark234
	cp /opt/soft/spark234/conf/slaves.template /opt/soft/spark234/conf/slaves
	cp /opt/soft/spark234/conf/spark-env.sh.template /opt/soft/spark234/conf/spark-env.sh
	echo "export SPARK_MASTER_HOST=192.168.98.135" >>  /opt/soft/spark234/conf/spark-env.sh
	echo "export SPARK_MASTER_PORT=7077" >>  /opt/soft/spark234/conf/spark-env.sh
	echo "export SPARK_WORKER_CORES=2" >>  /opt/soft/spark234/conf/spark-env.sh
	echo "export SPARK_WORKER_MEMORY=3g" >>  /opt/soft/spark234/conf/spark-env.sh
	echo "export SPARK_MASTER_WEBUI_PORT=8888" >>  /opt/soft/spark234/conf/spark-env.sh
	echo "export JAVA_HOME=/opt/soft/jdk180" >> /opt/soft/spark234/sbin/spark-config.sh
fi
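# Spark's bin/sbin are not added to /etc/profile by this script, so use full
# paths to start the standalone master/worker and to point spark-shell at the
# master address set above:
#   /opt/soft/spark234/sbin/start-all.sh
#   /opt/soft/spark234/bin/spark-shell --master spark://192.168.98.135:7077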

After installation, /opt/soft contains all of the installed components.
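With the flags left at their defaults above, a quick way to confirm the layout (not part of the original script) is to list the install directory:

ls /opt/soft
# expected: hadoop260  hbase120  hive110  jdk180  spark234  sqoop146  zk345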
