Hive安装(3.1.3)
1.下载&解压安装包
#下载解压hive
wget https://dlcdn.apache.org/hive/hive-3.1.3/apache-hive-3.1.3-bin.tar.gz
tar -zxvf apache-hive-3.1.3-bin.tar.gz
2.修改hive-env.sh
#复制模板文件
cp hive-env.sh.template hive-env.sh
#编辑hive-env.sh,在文件最后追加环境变量
vi hive-env.sh
export HADOOP_HOME=/home/hadoop/hadoop-3.3.1
export HIVE_HOME=/home/hive/apache-hive-3.1.3-bin
3.修改hive-site.xml
#复制模板文件
cp hive-default.xml.template hive-site.xml
#编辑hive-site.xml
vi hive-site.xml
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://IP:3306/hive?serverTimezone=Asia/Shanghai</value>
<description></description>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.cj.jdbc.Driver</value>
<description>Driver class name for a JDBC metastore</description>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value>
<description>Username to use against metastore database</description>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>123456</value>
<description>password to use against metastore database</description>
</property>
#Directory where structured hive query logs are created. One file per session is created in this directory. If this variable set to empty string structured log will not be created
<property>
<name>hive.querylog.location</name>
<value>/home/hive/querylog</value>
<description>Location of Hive run time structured log file</description>
</property>
#This directory is used for temporary files when Hive runs in local mode
<property>
<name>hive.exec.local.scratchdir</name>
<value>/home/hive/scratchdir</value>
<description>Local scratch space for Hive jobs</description>
</property>
#Temporary local directory for added resources in the remote file system
<property>
<name>hive.downloaded.resources.dir</name>
<value>/home/hive/resources</value>
<description>Temporary local directory for added resources in the remote file system.</description>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/user/hive/warehouse</value>
<description>location of default database for the warehouse</description>
</property>
#将mysql驱动包上传到hive的lib目录下
/home/hive/apache-hive-3.1.3-bin/lib
4.配置hive日志
#复制日志模板文件
cp hive-log4j2.properties.template hive-log4j2.properties
#修改日志级别
property.hive.log.level = WARN
#修改日志路径
property.hive.log.dir = /home/hive/apache-hive-3.1.3-bin/logs
#复制执行日志模板文件
cp hive-exec-log4j2.properties.template hive-exec-log4j2.properties
#修改执行日志级别
property.hive.log.level = WARN
#修改执行日志路径
property.hive.log.dir = /home/hive/apache-hive-3.1.3-bin/exeLogs
5.修改hadoop下的core-site.xml
#a superuser can submit jobs or access hdfs on behalf of another user
#注意:属性名中的super是代理用户名,需替换为实际运行hive的用户名(如hadoop.proxyuser.hive.hosts)
<property>
<name>hadoop.proxyuser.super.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.super.groups</name>
<value>*</value>
</property>
#同步到其它hadoop节点
scp core-site.xml hadoop02:/home/hadoop/hadoop-3.3.1/etc/hadoop
6.重启hadoop集群
#停止集群
./stop-all.sh
#启动集群
./start-all.sh
7.hive初始化
bin/schematool -dbType mysql -initSchema