Hive 3.1.2 Deployment

Installing Hive

wget https://dlcdn.apache.org/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz
# Extract the archive
tar xzf apache-hive-3.1.2-bin.tar.gz
cd $HIVE_HOME/conf
cp hive-default.xml.template hive-site.xml
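
The rest of this guide assumes Hive lives at /opt/bigdata/hive/apache-hive-3.1.2-bin with a `current` symlink (the same layout used in the distribution step further down), so HIVE_HOME can point at a stable path; a minimal sketch:

mkdir -p /opt/bigdata/hive
mv apache-hive-3.1.2-bin /opt/bigdata/hive/
ln -s /opt/bigdata/hive/apache-hive-3.1.2-bin /opt/bigdata/hive/current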

# /etc/profile
# Configure the environment variables

export JAVA_HOME=/usr/java/default
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH
export HADOOP_HOME=/opt/bigdata/hadoop/current
export HADOOP_MAPRED_HOME=${HADOOP_HOME}
export HADOOP_COMMON_HOME=${HADOOP_HOME}
export HADOOP_HDFS_HOME=${HADOOP_HOME}
export HADOOP_YARN_HOME=${HADOOP_HOME}
export HADOOP_CONF_DIR=/opt/bigdata/hadoop/current/etc/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin

export ZOOKEEPER_HOME=/opt/bigdata/zookeeper-3.4.6
export PATH=$PATH:$ZOOKEEPER_HOME/bin

export HIVE_HOME=/opt/bigdata/hive/current
export PATH=$PATH:$HIVE_HOME/bin
export SPARK_HOME=/opt/bigdata/spark/current
export PATH=$PATH:$SPARK_HOME/bin
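
After appending these exports, reload the profile and sanity-check the paths; a quick verification, assuming the layout above:

source /etc/profile
echo $HIVE_HOME    # should print /opt/bigdata/hive/current
hive --version     # should report Hive 3.1.2 (may hit the guava conflict described in the init step below)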


# Create the MySQL metastore database and user
CREATE DATABASE `hive3_remote` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */;
CREATE USER 'hive3'@'192.168.%' IDENTIFIED BY '123456';
GRANT ALL PRIVILEGES ON hive3_remote.* TO 'hive3'@'192.168.%';
FLUSH PRIVILEGES;
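
Before continuing, it is worth confirming that the Hive nodes can actually reach MySQL with these credentials; a quick test, assuming the MySQL server is 192.168.7.17 as in the JDBC URL below:

mysql -h 192.168.7.17 -u hive3 -p123456 -e 'SHOW DATABASES;'   # hive3_remote should be listed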

# Append the block above to the end of /etc/profile, then push it to the other hosts; it is fine if a host does not run every service
for i in {2,3,4,5};do scp /etc/profile node0${i}:/etc/profile ;done


# Copy the MySQL JDBC driver to the other hosts
for i in {2..5} ; do scp -rp mysql-connector-java-5.1.46.jar  node0${i}:`pwd`;done
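
The metastore can only load the driver if the jar ends up on Hive's classpath, i.e. in ${HIVE_HOME}/lib (see the ConnectionDriverName property below); on node01, for example:

cp mysql-connector-java-5.1.46.jar $HIVE_HOME/lib/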

Hive Master

hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/user/hive_remote/warehouse</value>
        <description>Default warehouse directory on HDFS</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://192.168.7.17:3306/hive3_remote?createDatabaseIfNotExist=true&amp;useSSL=false</value>
        <description>JDBC connection string for the metastore database</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
        <description>JDBC driver class; the driver jar must be copied into ${HIVE_HOME}/lib</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>hive3</value>
        <description>Database username</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>123456</value>
        <description>Database password</description>
    </property>
<!--    <property>
        <name>hive.security.authorization.enabled</name>
        <value>true</value>
        <description>Enable authorization</description>
    </property>
    <property>
        <name>hive.server2.enable.doAs</name>
        <value>false</value>
        <description>Impersonation; the default is true</description>
    </property>
    <property>
        <name>hive.users.in.admin.role</name>
        <value>root,god</value>
        <description>Users granted the admin role; multiple users may be listed</description>
    </property>
    <property>
        <name>hive.security.authorization.manager</name>
        <value>org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory</value>
        <description>Authorization manager class</description>
    </property>
    <property>
        <name>hive.security.authenticator.manager</name>
        <value>org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator</value>
        <description>Authentication manager class</description>
    </property>-->
    <property>
        <name>hive.cli.print.header</name>
        <value>true</value>
    </property>
    <property>
        <name>hive.cli.print.current.db</name>
        <value>true</value>
    </property>
</configuration>

Fixing garbled comments in Hive table metadata

The metastore tables are created with a latin1 charset by default, so Chinese comments come back garbled. Run the following against the metastore database (hive3_remote):

# Change the charset of column comments
ALTER TABLE COLUMNS_V2 modify column COMMENT varchar(256) character set utf8mb4;
# Change the charset of table comments
ALTER TABLE TABLE_PARAMS modify column PARAM_VALUE varchar(256) character set utf8mb4;
# Change partition parameters so partition keys can be expressed in Chinese
ALTER TABLE PARTITION_PARAMS modify column PARAM_VALUE varchar(40000) character set utf8mb4;
ALTER TABLE PARTITION_KEYS modify column PKEY_COMMENT varchar(40000) character set utf8mb4;
# Change index parameters to support Chinese
ALTER TABLE INDEX_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8mb4;
# Change view definitions to support Chinese
ALTER TABLE TBLS modify COLUMN VIEW_EXPANDED_TEXT mediumtext CHARACTER SET utf8mb4;
ALTER TABLE TBLS modify COLUMN VIEW_ORIGINAL_TEXT mediumtext CHARACTER SET utf8mb4;
 
# Change the database description to support Chinese
ALTER TABLE `DBS` CHANGE COLUMN `DESC` `DESC` VARCHAR(4000) CHARACTER SET 'utf8' NULL DEFAULT NULL ;
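
These ALTER statements modify the metastore schema directly, so run them after schematool has created the tables (see the initialization step below). One way, assuming the SQL above is saved as fix_metastore_charset.sql (a filename chosen here for illustration):

mysql -h 192.168.7.17 -u hive3 -p123456 hive3_remote < fix_metastore_charset.sql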

Hive client nodes

node02,node03,node04,node05

<configuration>
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://node01:9083</value>
        <description>Metastore address</description>
    </property>
</configuration>

Distributing the Hive package

[god@node01 hive]$ cd  /opt/bigdata/hive
[god@node01 hive]$ tar czf apache-hive-3.1.2-bin.gz apache-hive-3.1.2-bin
[god@node01 hive]$ for i in {2..5};do scp -rp apache-hive-3.1.2-bin.gz node0${i}:`pwd` ;done

# Run a command remotely to check the target directory
for i in {2..5};do ssh node0${i} "ls /opt/bigdata/hive" ;done

# Extract and create the current symlink
for i in {2..5};do ssh node0${i} "cd /opt/bigdata/hive && tar xf apache-hive-3.1.2-bin.gz && ln -s /opt/bigdata/hive/apache-hive-3.1.2-bin /opt/bigdata/hive/current && rm -f /opt/bigdata/hive/apache-hive-3.1.2-bin.gz " ;done

# Double-check if unsure
for i in {2..5};do ssh node0${i} "ls /opt/bigdata/hive" ;done

# Distribute the client configuration first
cd $HIVE_HOME/conf
cat > hive-site.xml <<-EOF 
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://node01:9083</value>
        <description>Metastore address</description>
    </property>
</configuration>
EOF

for i in {2..5};do scp -rp hive-site.xml node0${i}:`pwd` ;done
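
To confirm the client configuration landed on every node, a quick check (paths as assumed above):

for i in {2..5}; do ssh node0${i} "grep -A 1 'hive.metastore.uris' /opt/bigdata/hive/current/conf/hive-site.xml"; done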


# Then configure the Hive master node
The heredoc above also overwrote hive-site.xml on node01 itself, so re-create the long master configuration shown earlier on node01 only; it does not need to be distributed.

Starting the metastore and hiveserver2 services

Initialize the Hive metastore schema

[god@node01 conf]$ schematool -dbType mysql  -initSchema

# If initialization fails with an error like
# java.lang.NoSuchMethodError: com.google.common.base.Preconditions.checkArgument
# it is a guava version conflict: Hive 3.1.2 bundles guava-19.0 while Hadoop 3.x ships guava-27.0

# Replace Hive's guava jar with Hadoop's newer one
cp /opt/bigdata/hadoop/current/share/hadoop/common/lib/guava-27.0-jre.jar /opt/bigdata/hive/current/lib/

rm -f /opt/bigdata/hive/current/lib/guava-19.0.jar
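
# After swapping the jar, re-run the initialization and verify it; schematool's -info flag reports the schema version
schematool -dbType mysql -initSchema
schematool -dbType mysql -info   # should report a 3.1.x schema version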

Start the metastore service

[god@node01 current]$ hive --service metastore

# Run in the background
nohup hive --service metastore 1> /opt/bigdata/hive/current/metastore.log 2> /opt/bigdata/hive/current/metastore_err.log &
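
The metastore's Thrift service listens on port 9083 by default (matching hive.metastore.uris in the client config); a quick check that it is up:

ss -lntp | grep 9083   # should show a Java process in LISTEN state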


Start the hiveserver2 service

# Connecting to Hive over JDBC/ODBC requires the hiveserver2 service
# Start hiveserver2 on node03
hive --service hiveserver2   # equivalent to simply running: hiveserver2

# Run hiveserver2 in the background (note the property name is hive.server2.thrift.port)
nohup hiveserver2 --hiveconf hive.server2.thrift.port=10000 1> /opt/bigdata/hive/current/hiveserver.log 2> /opt/bigdata/hive/current/hiveserver.err &

# Connect from node04
beeline -u jdbc:hive2://node03:10000/default -n god
show tables;

# When beeline connects to hiveserver2, a directory named after the connecting user is created under /tmp/hive on HDFS; if the user lacks write permission there, the connection fails
[god@node03 ~]$ hdfs dfs -ls /tmp/hive
Found 3 items
drwx-wx-wx   - god    supergroup          0 2021-10-24 12:24 /tmp/hive/_resultscache_
drwx------   - balala supergroup          0 2021-10-24 12:31 /tmp/hive/balala
drwx------   - god    supergroup          0 2021-10-24 12:31 /tmp/hive/god


# If a permission error occurs, open up the directory
hadoop fs -chmod 777 /tmp
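
A slightly safer alternative keeps the sticky bit, so users cannot remove each other's files:

hadoop fs -chmod 1777 /tmp/hive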