Hadoop Environment Setup (8) -- Hive Setup, Part 1

I. Hive Setup: Method 1

This method requires a MySQL database installed on Windows (the host machine).

  1. Hive setup

1) Place the apache-hive-3.1.2-bin.tar.gz archive under /opt/software

2) [root@hadoop100 ~]# cd /opt/software

3) Extract apache-hive-3.1.2-bin.tar.gz into /opt/module/

[root@hadoop100 software]# tar -zxvf apache-hive-3.1.2-bin.tar.gz -C /opt/module/

4) Go into the conf/ directory under the extracted apache-hive-3.1.2-bin directory

[root@hadoop100 module]# cd /opt/module/apache-hive-3.1.2-bin/conf/

5) Create and edit the hive-site.xml file with the following content:

Note: set the JDBC URL's IP address below to your own VMnet8 address (shown by ipconfig on Windows) and use your own database username and password.

[root@hadoop100 conf]# vi hive-site.xml

<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <!-- JDBC connection URL -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://192.168.1.5:3306/hive?useSSL=false&amp;characterEncoding=utf8&amp;serverTimezone=UTC</value>
  </property>
  <!-- JDBC connection driver -->
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.cj.jdbc.Driver</value>
  </property>
  <!-- JDBC connection username -->
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
  </property>
  <!-- JDBC connection password -->
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>jyl010212..</value>
  </property>
  <!-- Hive metastore schema version verification -->
  <property>
    <name>hive.metastore.schema.verification</name>
    <value>false</value>
  </property>
  <!-- Metastore event notification authorization -->
  <property>
    <name>hive.metastore.event.db.notification.api.auth</name>
    <value>false</value>
  </property>
  <!-- Address of the metastore service -->
  <property>
    <name>hive.metastore.uris</name>
    <value>thrift://localhost:9083</value>
  </property>
  <!-- Host for hiveserver2 connections -->
  <property>
    <name>hive.server2.thrift.bind.host</name>
    <value>localhost</value>
  </property>
  <!-- Port for hiveserver2 connections -->
  <property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
  </property>
  <property>
    <name>hive.cli.print.header</name>
    <value>true</value>
  </property>
  <property>
    <name>hive.cli.print.current.db</name>
    <value>true</value>
  </property>
</configuration>
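Before moving on, it is worth confirming that the VM can actually reach the Windows MySQL instance at the address configured above (Windows Firewall and MySQL's remote-access settings are common blockers). A quick, hedged check from the VM:

[root@hadoop100 conf]# ping -c 3 192.168.1.5

[root@hadoop100 conf]# timeout 3 bash -c 'echo > /dev/tcp/192.168.1.5/3306' && echo "MySQL port 3306 is reachable"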

6) Place the MySQL JDBC driver jar in /opt/module/apache-hive-3.1.2-bin/lib
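A hedged example, assuming the connector jar was downloaded to /opt/software and is named mysql-connector-java-8.0.16.jar (adjust to your actual file name and location):

[root@hadoop100 conf]# cp /opt/software/mysql-connector-java-8.0.16.jar /opt/module/apache-hive-3.1.2-bin/lib/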

7) Check that the MySQL driver jar is present in the lib directory

[root@hadoop100 conf]# ls /opt/module/apache-hive-3.1.2-bin/lib/ | grep mysql

8) Move /opt/module/hadoop-3.1.3/share/hadoop/common/lib/guava-27.0-jre.jar to /opt/module/apache-hive-3.1.2-bin/lib

[root@hadoop100 apache-hive-3.1.2-bin]# cd /opt/module/apache-hive-3.1.2-bin/

[root@hadoop100 apache-hive-3.1.2-bin]# mv /opt/module/hadoop-3.1.3/share/hadoop/common/lib/guava-27.0-jre.jar ./lib/

If a confirmation prompt appears, just press Enter.
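This step is needed because Hive 3.1.2 ships an older Guava (guava-19.0.jar) that conflicts with the guava-27.0-jre.jar used by Hadoop 3.1.3 and typically causes a NoSuchMethodError during schema initialization. A hedged cleanup sketch, assuming the old jar is still present in Hive's lib directory:

[root@hadoop100 apache-hive-3.1.2-bin]# ls ./lib/ | grep guava   # check which Guava versions are present

[root@hadoop100 apache-hive-3.1.2-bin]# rm -f ./lib/guava-19.0.jar   # assumed file name; remove only the older Guava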

9) Initialize the Hive metastore schema in MySQL

[root@hadoop100 apache-hive-3.1.2-bin]# bin/schematool -dbType mysql -initSchema -verbose
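Note: the JDBC URL above does not include createDatabaseIfNotExist=true, so the hive database must already exist on the Windows MySQL instance, and the account must be allowed to connect from the VM, before schematool is run. A hedged sketch in the Windows MySQL client, assuming the 'root'@'%' account already exists (grant syntax can vary between MySQL versions):

mysql> CREATE DATABASE hive;

mysql> GRANT ALL PRIVILEGES ON hive.* TO 'root'@'%';   -- skip if remote access is already configured

mysql> FLUSH PRIVILEGES;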

10) Start the metastore and hiveserver2 services in the background (to start them via a script instead, see the configuration in section 3 below)

[root@hadoop100 apache-hive-3.1.2-bin]# nohup bin/hive --service metastore &

[root@hadoop100 apache-hive-3.1.2-bin]# nohup bin/hive --service hiveserver2 &
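Both services run as RunJar processes, so a quick way to confirm they came up is to check the processes and the listening ports (9083 and 10000, matching the config above); hiveserver2 can take a minute or two before port 10000 starts listening:

[root@hadoop100 apache-hive-3.1.2-bin]# jps | grep RunJar

[root@hadoop100 apache-hive-3.1.2-bin]# netstat -nltp | grep -E '9083|10000'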

11) Start the Hive CLI

[root@hadoop100 apache-hive-3.1.2-bin]# hive

If you see output like the following, Hive has started normally.

(The "which: no hbase" line simply means HBase is not installed; it does not affect Hive and is not an error.)

which: no hbase in (/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/opt/module/jdk1.8.0_212/bin:/opt/module/hadoop-3.1.3/bin:/opt/module/hadoop-3.1.3/sbin:/opt/module/hive/bin:/home/soft863/.local/bin:/home/soft863/bin)

Hive Session ID = 36f90830-2d91-469d-8823-9ee62b6d0c26

Logging initialized using configuration in jar:file:/opt/module/hive/lib/hive-common-3.1.2.jar!/hive-log4j2.properties Async: true

Hive Session ID = 14f96e4e-7009-4926-bb62-035be9178b02

hive>
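A quick smoke test in the CLI (only the default database exists on a fresh install):

hive> show databases;

hive> quit;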

2. Hive environment variable configuration:

1) Edit the profile file

[root@hadoop100 apache-hive-3.1.2-bin]# vi /etc/profile

Add the following line:

HIVE_HOME=/opt/module/apache-hive-3.1.2-bin

Modify the existing PATH and export lines as follows:

PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin

export PATH JAVA_HOME HADOOP_HOME HIVE_HOME

2) Refresh /etc/profile

[root@hadoop100 apache-hive-3.1.2-bin]# source /etc/profile
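To confirm the variables took effect:

[root@hadoop100 apache-hive-3.1.2-bin]# echo $HIVE_HOME

[root@hadoop100 apache-hive-3.1.2-bin]# which hive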

3. Script to start metastore and hiveserver2 in the background

1) For convenience, you can write a script to manage starting and stopping these services

[soft863@hadoop102 hive]$ vim $HIVE_HOME/bin/hiveservices.sh

The content is as follows. You are not required to master how this script is written; just use it as-is.

#!/bin/bash

HIVE_LOG_DIR=$HIVE_HOME/logs

if [ ! -d $HIVE_LOG_DIR ]

then

mkdir -p $HIVE_LOG_DIR

fi

# Check whether a process is running normally; argument 1 is the process name, argument 2 is its port

function check_process()

{

    pid=$(ps -ef 2>/dev/null | grep -v grep | grep -i $1 | awk '{print $2}')

    ppid=$(netstat -nltp 2>/dev/null | grep $2 | awk '{print $7}' | cut -d '/' -f 1)
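    # pid: PID(s) matched by process name; ppid: PID actually listening on the given port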

    echo $pid

    [[ "$pid" =~ "$ppid" ]] && [ "$ppid" ] && return 0 || return 1

}

function hive_start()

{

    metapid=$(check_process HiveMetastore 9083)

    cmd="nohup hive --service metastore >$HIVE_LOG_DIR/metastore.log 2>&1 &"

    cmd=$cmd" sleep 4; hdfs dfsadmin -safemode wait >/dev/null 2>&1"

    [ -z "$metapid" ] && eval $cmd || echo "Metastroe服务已启动"

    server2pid=$(check_process HiveServer2 10000)

    cmd="nohup hive --service hiveserver2 >$HIVE_LOG_DIR/hiveServer2.log 2>&1 &"

    [ -z "$server2pid" ] && eval $cmd || echo "HiveServer2服务已启动"

}

function hive_stop()

{

    metapid=$(check_process HiveMetastore 9083)

    [ "$metapid" ] && kill $metapid || echo "Metastore服务未启动"

    server2pid=$(check_process HiveServer2 10000)

    [ "$server2pid" ] && kill $server2pid || echo "HiveServer2服务未启动"

}

case $1 in

"start")

    hive_start

    ;;

"stop")

    hive_stop

    ;;

"restart")

    hive_stop

    sleep 2

    hive_start

    ;;

"status")

    check_process HiveMetastore 9083 >/dev/null && echo "Metastore service is running normally" || echo "Metastore service is not running properly"

    check_process HiveServer2 10000 >/dev/null && echo "HiveServer2 service is running normally" || echo "HiveServer2 service is not running properly"

    ;;

*)

    echo Invalid Args!

    echo 'Usage: '$(basename $0)' start|stop|restart|status'

    ;;

esac

2) Add execute permission

[soft863@hadoop102 hive]$ chmod +x $HIVE_HOME/bin/hiveservices.sh

3) Start the Hive background services

[soft863@hadoop102 hive]$ hiveservices.sh start
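After starting, the same script can report service status, and beeline can be used to verify that hiveserver2 accepts connections (the URL assumes the localhost/10000 settings from hive-site.xml above; hiveserver2 may need a minute or two before it accepts connections):

[soft863@hadoop102 hive]$ hiveservices.sh status

[soft863@hadoop102 hive]$ beeline -u jdbc:hive2://localhost:10000 -n root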
