- 下载安装包或下载源码(自己编译);
- 解压安装包
# Remove Hive's bundled guava-19.0.jar — it conflicts with the guava-27 that
# Hadoop 3.3.x ships, causing NoSuchMethodError at Hive startup.
# HIVE_HOME / HADOOP_HOME must be set (e.g. /usr/local/hive, /usr/local/hadoop-3.3.1);
# ${VAR:?} aborts instead of deleting from "/" if they are unset.
rm -f -- "${HIVE_HOME:?}/lib/guava-19.0.jar"
# Replace it with the guava shipped by Hadoop so both sides use the same version.
cp -- "${HADOOP_HOME:?}/share/hadoop/common/lib/guava-27.0-jre.jar" "${HIVE_HOME}/lib/"
# NOTE(review): the original note copied "hadoop-common.xxx.jar" — "xxx" is a
# version placeholder; glob matches the actual versioned jar in your distribution.
cp -- "${HADOOP_HOME}/share/hadoop/common/lib/"hadoop-common-*.jar "${HIVE_HOME}/lib/"
-
配置环境变量
#java export JAVA_HOME=/usr/local/jdk1.8.0_202 export JRE_HOME=${JAVA_HOME}/jre export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib:$CLASSPATH export JAVA_PATH=${JAVA_HOME}/bin:${JRE_HOME}/bin export PATH=$PATH:${JAVA_PATH} #hadoop export HADOOP_HOME=/usr/local/hadoop-3.3.1 export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin #hive export HIVE_HOME=/usr/local/hive export PATH=$PATH:$HIVE_HOME/bin
-
配置hive/conf/hive-env.sh文件
# Set HADOOP_HOME to point to a specific hadoop install directory HADOOP_HOME=/usr/local/hadoop-3.3.1 # Hive Configuration Directory can be controlled by: export HIVE_CONF_DIR=/usr/local/hive/conf # Folder containing extra libraries required for hive compilation/execution can be controlled by: export HIVE_AUX_JARS_PATH=/usr/local/hive/lib
-
配置 hive/conf/hive-site.xml文件
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> <configuration> <property> <name>javax.jdo.option.ConnectionURL</name> <value>jdbc:mysql://127.0.0.1:3306/hive?createDatabaseIfNotExist=true</value> </property> <property> <name>javax.jdo.option.ConnectionDriverName</name> <value>com.mysql.jdbc.Driver</value> </property> <property> <name>javax.jdo.option.ConnectionUserName</name> <value>root</value> </property> <property> <name>javax.jdo.option.ConnectionPassword</name> <value>1q2w#E$R</value> </property> <property> <name>hive.querylog.location</name> <value>/user/hive/log</value> </property> <property> <name>datanucleus.metadata.validate</name> <value>false</value> </property> <property> <name>hive.metastore.schema.verification</name> <value>false</value> </property> <property> <name>datanucleus.schema.autoCreateAll</name> <value>true</value> </property> <!-- H2S运行绑定host --> <property> <name>hive.server2.thrift.bind.host</name> <value>node1</value> </property> <!-- 远程模式部署metastore metastore地址 --> <property> <name>hive.metastore.uris</name> <value>thrift://node1:9083</value> </property> <!-- 关闭元数据存储授权 --> <property> <name>hive.metastore.event.db.notification.api.auth</name> <value>false</value> </property> </configuration>
-
把 mysql-connector-java.jar 复制到 hive/lib 下, 需要区分 mysql 版本 (MySQL 5.x 用 Connector/J 5.x, MySQL 8.x 用 Connector/J 8.x)
-
启动hadoop集群创建文件夹
hdfs dfs -mkdir -p /user/hive/warehouse hdfs dfs -mkdir -p /user/hive/tmp hdfs dfs -mkdir -p /user/hive/log hdfs dfs -chmod g+w /user/hive/warehouse hdfs dfs -chmod g+w /user/hive/tmp hadoop fs -chmod -R 777 /user/hive/tmp hdfs dfs -chmod g+w /user/hive/log
-
初始化数据库(schematool -dbType mysql -initSchema)
-
启动metastore ( nohup hive --service metastore > metastore.log 2>&1 &)
-
启动远程访问 (nohup hive --service hiveserver2 > hiveserver2.log 2>&1 &)
beeline 远程连接: 在 beeline 提示符下执行 !connect jdbc:hive2://node1:10000 (注意 !connect 中间没有空格)