确认已安装mysql8
在Linux命令行执行service mysqld start && chkconfig mysqld on启动mysql服务并设置开机自启(注意用&&连接两条命令,单个&会将前一条命令放入后台执行)
1.将安装包apache-hive-3.1.2-bin.tar.gz上传到/opt/soft目录下
2.解压安装包到/opt/model目录下,解压命令:
tar -zxf apache-hive-3.1.2-bin.tar.gz -C /opt/model
3.进入到Hive的安装目录的conf目录下,复制hive-env.sh.template为hive-env.sh
修改hive-env.sh文件,在末尾添加如下内容
export HADOOP_HOME=/usr/local/hadoop-3.1.4
4.touch hive-site.xml新建配置文件,加入内容:
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- hive-site.xml: Hive working directories on HDFS, MySQL-backed metastore
     connection settings, and the HiveServer2 Thrift endpoint. -->
<configuration>
  <!-- HDFS locations used by Hive at runtime -->
  <property>
    <description>Hive使用该目录存储不同map/reduce阶段的计划及阶段的中间输出</description>
    <name>hive.exec.scratchdir</name>
    <value>hdfs://master:8020/user/hive/tmp</value>
  </property>
  <property>
    <description>元数据库位置</description>
    <name>hive.metastore.warehouse.dir</name>
    <value>hdfs://master:8020/user/hive/warehouse</value>
  </property>
  <property>
    <description>Hive运行时结构化日志文件的位置</description>
    <name>hive.querylog.location</name>
    <value>hdfs://master:8020/user/hive/log</value>
  </property>
  <!-- Metastore service endpoint and JDBC connection to the backing MySQL DB -->
  <property>
    <description>目标元数据库</description>
    <name>hive.metastore.uris</name>
    <value>thrift://master:9083</value>
  </property>
  <property>
    <description>元数据连接字符串</description>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true</value>
  </property>
  <property>
    <description>元数据连接驱动类名</description>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.cj.jdbc.Driver</value>
  </property>
  <property>
    <description>元数据连接时的用户名</description>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
  </property>
  <property>
    <description>元数据连接时的用户密码</description>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>123456</value>
  </property>
  <property>
    <description>允许多线程同时通过JDO访问元数据</description>
    <name>javax.jdo.option.Multithreaded</name>
    <value>true</value>
  </property>
  <!-- HiveServer2 Thrift interface -->
  <property>
    <description>HiveServer2 Thrift接口的端口号</description>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
  </property>
  <property>
    <description>运行HiveServer2 Thrift接口的主机</description>
    <name>hive.server2.thrift.bind.host</name>
    <value>localhost</value>
  </property>
</configuration>
5.上传mysql驱动mysql-connector-java-8.0.26.jar到/opt/model/apache-hive-3.1.2-bin/lib目录
将hive安装目录lib下的jline-2.12.jar同步到hadoop类库中,勿忘子节点同样需要同步
cp /opt/model/apache-hive-3.1.2-bin/lib/jline-2.12.jar /usr/local/hadoop-3.1.4/share/hadoop/yarn/lib/
scp /opt/model/apache-hive-3.1.2-bin/lib/jline-2.12.jar slave1:/usr/local/hadoop-3.1.4/share/hadoop/yarn/lib/
scp /opt/model/apache-hive-3.1.2-bin/lib/jline-2.12.jar slave2:/usr/local/hadoop-3.1.4/share/hadoop/yarn/lib/
删除hive安装目录lib下的guava-19.0.jar包,并将hadoop类库中的新版guava包同步过来
rm -f /opt/model/apache-hive-3.1.2-bin/lib/guava-19.0.jar
cp /usr/local/hadoop-3.1.4/share/hadoop/common/lib/guava-27.0-jre.jar /opt/model/apache-hive-3.1.2-bin/lib/
6.设置环境变量vi /etc/profile,添加:
#HIVE
export HIVE_HOME=/opt/model/apache-hive-3.1.2-bin
export PATH=$PATH:$HIVE_HOME/bin
source /etc/profile使配置生效
7.进入mysql数据库mysql -uroot -p123456
create database hive;
alter database hive character set latin1;
8.在Linux命令行初始化元数据库,进入hive安装包bin目录
./schematool -dbType mysql -initSchema
(schematool随Hive一同发布,位于$HIVE_HOME/bin目录下,无需通过yum额外安装)
9.启动Hive(前提:已开启hadoop以及mysql服务)
hive --service metastore &