Hadoop 3.2.0 完全分布式环境下安装 Hive 3.1.1
# Unpack the Hive distribution into /usr/local.
tar -x -z -v -f /usr/local/soft/apache-hive-3.1.1-bin.tar.gz -C /usr/local/
# Drop the "-bin" suffix from the unpacked directory name.
mv -- /usr/local/apache-hive-3.1.1-bin/ /usr/local/apache-hive-3.1.1
# Append the Hive environment variables to the system-wide profile.
vim /etc/profile
# Add the following content — one export per line. (The original notes had all
# three exports collapsed onto a single line, which is fragile and unreadable.)
export HIVE_HOME=/usr/local/apache-hive-3.1.1
export PATH=$PATH:$HIVE_HOME/bin
# NOTE(review): appending $HIVE_HOME/bin to CLASSPATH is unusual — a Java
# classpath normally references jar/conf directories, not bin. Confirm this
# entry is actually needed.
export CLASSPATH=$CLASSPATH:$HIVE_HOME/bin
# 使环境变量生效 (reload the profile so the variables take effect)
source /etc/profile
# Create/edit Hive's environment script.
vim /usr/local/apache-hive-3.1.1/conf/hive-env.sh
# Add the following content — one export per line. (The original notes had
# both exports collapsed onto a single line.)
# NOTE(review): the document title says Hadoop 3.2.0, but this path points at
# hadoop-3.1.1 — verify which Hadoop version is actually installed and make
# the path match.
export HADOOP_HOME=/usr/local/hadoop-3.1.1
export HIVE_CONF_DIR=/usr/local/apache-hive-3.1.1/conf
# Configure the metastore connection (content below).
vim /usr/local/apache-hive-3.1.1/conf/hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <!-- JDBC URL of the metastore database. Ampersands in the query string MUST
       be escaped as &amp; inside XML — the original used raw '&', which is not
       well-formed XML and makes Hive fail to parse hive-site.xml. -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://192.168.67.1:3306/hadoop_hive_meta?allowMultiQueries=true&amp;useSSL=false&amp;verifyServerCertificate=false</value>
  </property>
  <!-- MySQL Connector/J 8.x (the 8.0.12 jar installed below) uses
       com.mysql.cj.jdbc.Driver; the legacy com.mysql.jdbc.Driver class is
       deprecated in 8.0 and only works via a redirect warning.
       NOTE(review): Connector/J 8.x often also requires a serverTimezone
       parameter in the URL depending on the MySQL server's timezone setting —
       confirm against the server. -->
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.cj.jdbc.Driver</value>
  </property>
  <!-- Replace the two placeholder values below with the real credentials. -->
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>数据库用户名</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>数据库密码</value>
  </property>
  <!-- Allow DataNucleus to create/extend the metastore schema on demand. -->
  <property>
    <name>datanucleus.readOnlyDatastore</name>
    <value>false</value>
  </property>
  <property>
    <name>datanucleus.fixedDatastore</name>
    <value>false</value>
  </property>
  <property>
    <name>datanucleus.autoCreateSchema</name>
    <value>true</value>
  </property>
  <property>
    <name>datanucleus.autoCreateTables</name>
    <value>true</value>
  </property>
  <property>
    <name>datanucleus.autoCreateColumns</name>
    <value>true</value>
  </property>
</configuration>
# Put the downloaded MySQL JDBC driver jar on Hive's classpath.
hive_lib=/usr/local/apache-hive-3.1.1/lib
cp -- /usr/local/soft/mysql-connector-java-8.0.12.jar "${hive_lib}/"
# First create an EMPTY database named hadoop_hive_meta on the MySQL server;
# the name must match the one in javax.jdo.option.ConnectionURL in
# hive-site.xml.
# Initialize the metastore schema before the first start of Hive.
schematool -initSchema -dbType mysql