Installing Hive
wget https://dlcdn.apache.org/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz
Extract the archive:
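A minimal sketch of the extraction step, assuming the same /opt/bigdata/hive layout and "current" symlink that the rest of these notes use:
mkdir -p /opt/bigdata/hive
tar xzf apache-hive-3.1.2-bin.tar.gz -C /opt/bigdata/hive
ln -s /opt/bigdata/hive/apache-hive-3.1.2-bin /opt/bigdata/hive/current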
cd $HIVE_HOME/conf
cp hive-default.xml.template hive-site.xml
Append the following environment variables to /etc/profile (this file is distributed to the other nodes below):
export JAVA_HOME=/usr/java/default
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH
export HADOOP_HOME=/opt/bigdata/hadoop/current
export HADOOP_MAPRED_HOME=${HADOOP_HOME}
export HADOOP_COMMON_HOME=${HADOOP_HOME}
export HADOOP_HDFS_HOME=${HADOOP_HOME}
export HADOOP_YARN_HOME=${HADOOP_HOME}
export HADOOP_CONF_DIR=/opt/bigdata/hadoop/current/etc/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export ZOOKEEPER_HOME=/opt/bigdata/zookeeper-3.4.6
export PATH=$PATH:$ZOOKEEPER_HOME/bin
export HIVE_HOME=/opt/bigdata/hive/current
export PATH=$PATH:$HIVE_HOME/bin
export SPARK_HOME=/opt/bigdata/spark/current
export PATH=$PATH:$SPARK_HOME/bin
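After editing /etc/profile, reload it and sanity-check that the tools resolve (a quick check, not part of the original notes):
source /etc/profile
hive --version     # should report 3.1.2
hadoop version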
Create the metastore database and user in MySQL:
CREATE DATABASE `hive3_remote` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */;
CREATE USER 'hive3'@'192.168.%' IDENTIFIED BY '123456';
GRANT ALL PRIVILEGES ON hive3_remote.* TO 'hive3'@'192.168.%';
FLUSH PRIVILEGES;
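Before wiring Hive to this database, it is worth confirming the grant works from a Hive node; a verification sketch, assuming the mysql client is installed and 192.168.7.17 is the MySQL host used in the JDBC URL below:
mysql -h 192.168.7.17 -u hive3 -p123456 hive3_remote -e 'SELECT 1;'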
for i in {2,3,4,5}; do scp /etc/profile node0${i}:/etc/profile; done
for i in {2..5}; do scp -rp mysql-connector-java-5.1.46.jar node0${i}:`pwd`; done
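The connector jar has to end up in ${HIVE_HOME}/lib on every node (see the ConnectionDriverName description below); if the working directory above is not already that lib directory, copy it in place, e.g.:
cp mysql-connector-java-5.1.46.jar $HIVE_HOME/lib/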
Hive master node (node01)
hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/user/hive_remote/warehouse</value>
<description>Default warehouse directory on HDFS</description>
</property>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://192.168.7.17:3306/hive3_remote?createDatabaseIfNotExist=true&amp;useSSL=false</value>
<description>JDBC connection for the metastore database</description>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
<description>JDBC driver class; the driver jar must be copied into ${HIVE_HOME}/lib</description>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive3</value>
<description>Metastore database username</description>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>123456</value>
<description>Metastore database password</description>
</property>
<property>
<name>hive.cli.print.header</name>
<value>true</value>
</property>
<property>
<name>hive.cli.print.current.db</name>
<value>true</value>
</property>
</configuration>
Fixing garbled Chinese comments on Hive tables
Run the following statements against the metastore database (hive3_remote) in MySQL:
# change the character set used for column comments
ALTER TABLE COLUMNS_V2 modify column COMMENT varchar(256) character set utf8mb4;
# change the character set used for table comments
ALTER TABLE TABLE_PARAMS modify column PARAM_VALUE varchar(256) character set utf8mb4;
# change partition parameters so partition keys and comments can contain Chinese
ALTER TABLE PARTITION_PARAMS modify column PARAM_VALUE varchar(40000) character set utf8mb4;
ALTER TABLE PARTITION_KEYS modify column PKEY_COMMENT varchar(40000) character set utf8mb4;
# change index parameters to support Chinese
ALTER TABLE INDEX_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8mb4;
# change view definitions to support Chinese
ALTER TABLE TBLS modify COLUMN VIEW_EXPANDED_TEXT mediumtext CHARACTER SET utf8mb4;
ALTER TABLE TBLS modify COLUMN VIEW_ORIGINAL_TEXT mediumtext CHARACTER SET utf8mb4;
# change the database description
ALTER TABLE `DBS` CHANGE COLUMN `DESC` `DESC` VARCHAR(4000) CHARACTER SET 'utf8' NULL DEFAULT NULL;
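To confirm the changes took effect, the column collations can be inspected directly in MySQL (a verification sketch reusing the connection details from above):
# the COMMENT column should now report a utf8mb4 collation
mysql -h 192.168.7.17 -u hive3 -p123456 hive3_remote -e 'SHOW FULL COLUMNS FROM COLUMNS_V2;'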
Hive client nodes
node02,node03,node04,node05
<configuration>
<property>
<name>hive.metastore.uris</name>
<value>thrift://node01:9083</value>
<description>Metastore address</description>
</property>
</configuration>
Distribute the Hive package
[god@node01 hive]$ cd /opt/bigdata/hive
[god@node01 hive]$ tar czf apache-hive-3.1.2-bin.gz apache-hive-3.1.2-bin
[god@node01 hive]$ for i in {2..5}; do scp -rp apache-hive-3.1.2-bin.gz node0${i}:`pwd`; done
for i in {2..5}; do ssh node0${i} "ls /opt/bigdata/hive"; done
for i in {2..5}; do ssh node0${i} "cd /opt/bigdata/hive && tar xf apache-hive-3.1.2-bin.gz && ln -s /opt/bigdata/hive/apache-hive-3.1.2-bin /opt/bigdata/hive/current && rm -f /opt/bigdata/hive/apache-hive-3.1.2-bin.gz"; done
for i in {2..5}; do ssh node0${i} "ls /opt/bigdata/hive"; done
cd $HIVE_HOME/conf
cat > hive-site.xml <<- EOF
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>hive.metastore.uris</name>
<value>thrift://node01:9083</value>
<description>metastore地址</description>
</property>
</configuration>
EOF
for i in {2..5}; do scp -rp hive-site.xml node0${i}:`pwd`; done
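A quick check that every client received the short config (same pattern as the ls checks above):
for i in {2..5}; do ssh node0${i} "grep -A1 hive.metastore.uris /opt/bigdata/hive/current/conf/hive-site.xml"; done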
On the master, put back the long hive-site.xml shown above; that full configuration stays on the master only and does not need to be distributed.
Starting the metastore and hiveserver2 services
Initialize the Hive metastore schema
[god@node01 conf]$ schematool -dbType mysql -initSchema
If schematool fails with a java.lang.NoSuchMethodError on com.google.common.base.Preconditions.checkArgument, the cause is a Guava version conflict between Hadoop and Hive. Copy Hadoop's newer guava jar into Hive's lib directory and remove the old one:
cp /opt/bigdata/hadoop/current/share/hadoop/common/lib/guava-27.0-jre.jar /opt/bigdata/hive/current/lib/
rm -f /opt/bigdata/hive/current/lib/guava-19.0.jar
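After swapping the jar, re-run the initialization; schematool can also confirm what was written (the -info flag is standard schematool usage):
schematool -dbType mysql -initSchema
schematool -dbType mysql -info   # should print the 3.1.x schema version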
Start the metastore service
[god@node01 current]$ hive --service metastore
# run it in the background
nohup hive --service metastore 1>/opt/bigdata/hive/current/metastore.log 2>/opt/bigdata/hive/current/metastore_err.log &
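To confirm the metastore is up, check that it is listening on the thrift port from hive.metastore.uris (9083); ss is assumed to be available:
ss -lnt | grep 9083
jps | grep RunJar    # Hive services appear as RunJar processes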
Start the hiveserver2 service
hive --service hiveserver2 is equivalent to running hiveserver2 directly:
nohup hiveserver2 --hiveconf hive.server2.thrift.port=10000 1>/opt/bigdata/hive/current/hiveserver.log 2>/opt/bigdata/hive/current/hiveserver.err &
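Likewise, hiveserver2 should be listening on port 10000 before any client connects:
ss -lnt | grep 10000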
beeline -u jdbc:hive2://node03:10000/default -n god
show tables;
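beeline can also run statements non-interactively with -e, which is handy for smoke tests:
beeline -u jdbc:hive2://node03:10000/default -n god -e "show databases;"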
[god@node03 ~]$ hdfs dfs -ls /tmp/hive
Found 3 items
drwx-wx-wx   - god    supergroup          0 2021-10-24 12:24 /tmp/hive/_resultscache_
drwx------   - balala supergroup          0 2021-10-24 12:31 /tmp/hive/balala
drwx------   - god    supergroup          0 2021-10-24 12:31 /tmp/hive/god
If other users hit permission errors on the HDFS scratch directory (note the 700 permissions on the per-user directories above), open it up:
hadoop fs -chmod 777 /tmp
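Then verify the scratch directory is world-writable:
hdfs dfs -ls -d /tmp    # should show drwxrwxrwx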