#!/usr/bin/env bash
# Install Apache Hive 2.3.1 under /usr/local/hive and register its
# environment variables in ~/.bashrc.
# NOTE: Hadoop must already be installed and configured before running this.

tar -zxvf apache-hive-2.3.1-bin.tar.gz
mv apache-hive-2.3.1-bin /usr/local/hive

# Single-quote the export lines so $PATH/$CLASSPATH are expanded at login
# time, not now: the original unquoted echo wrote the *already expanded*
# values of $PATH/$CLASSPATH into ~/.bashrc, and the unquoted lib/* glob
# could expand against the current directory before being written out.
echo 'export HIVE_HOME=/usr/local/hive' >> ~/.bashrc
echo 'export PATH=$PATH:$HIVE_HOME/bin' >> ~/.bashrc
echo 'export CLASSPATH=$CLASSPATH:/usr/local/hive/lib/*:.' >> ~/.bashrc
source ~/.bashrc
# Work inside Hive's conf directory; abort if it is missing so the cp
# below cannot run in the wrong directory (the original cd was unquoted
# and unchecked).
cd "$HIVE_HOME/conf" || exit 1

# Materialize the defaults template for reference.
# NOTE(review): Hive ignores hive-default.xml at runtime — user overrides
# belong in hive-site.xml. Confirm whether copying to hive-default.xml
# (rather than hive-site.xml) was actually intended here.
cp hive-default.xml.template hive-default.xml
# Create the HDFS directories Hive needs. Use -p to create missing
# parents and tolerate already-existing directories: the original ran
# 'mkdir /user/hive/warehouse' before '/user/hive' existed (which fails
# without -p) and then repeated the same mkdir a second time.
hdfs dfs -mkdir -p /tmp
hdfs dfs -mkdir -p /user/hive/warehouse

# Hive jobs may run as different users in the same group, so the scratch
# dir and warehouse need group write permission.
hdfs dfs -chmod g+w /tmp
hdfs dfs -chmod g+w /user/hive/warehouse
# Manual Derby schema workaround: open the schema file and comment out
# the first two SQL statements before initializing the metastore.
# NOTE(review): presumably these are the leading CREATE FUNCTION
# "APP"."NUCLEUS_*" statements that break schema initialization on
# Derby — confirm against the actual file contents. This vi step is
# interactive and will block if this file is run as a script.
vi $HIVE_HOME/scripts/metastore/upgrade/derby/hive-schema-2.3.0.derby.sql
# Comment out the first two SQL statements (translated from Chinese).
schematool -dbType derby -initSchema --verbose
# Start HiveServer2 in the background: it runs in the foreground, so the
# original invocation blocked here and the following commands never ran.
nohup hiveserver2 > /tmp/hiveserver2.log 2>&1 &

# Start the HCatalog server.
"$HIVE_HOME"/hcatalog/sbin/hcat_server.sh start

# Give HiveServer2 a moment to open its JDBC port (10000) before the
# beeline client tries to connect.
sleep 10
beeline -u "jdbc:hive2://localhost:10000" -n root