#close firewall
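#How to do this depends on the distribution; on an iptables-based system (assumption) something like:
sudo service iptables stop
sudo chkconfig iptables off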
#Enable remote login for MySQL by commenting out bind-address = 127.0.0.1 in the MySQL config
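#One way to do this (the config path /etc/my.cnf is an assumption, it varies by install): comment out the bind-address line, then restart MySQL
sudo gedit /etc/my.cnf
sudo /usr/local/mysql/support-files/mysql.server restart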
#Unpack apache-hive-0.13.1-bin.tar.gz
tar -zxvf apache-hive-0.13.1-bin.tar.gz
#Rename the extracted directory to hive
mv apache-hive-0.13.1-bin hive
#Set the environment variables to your actual install paths
sudo gedit /etc/profile
export HADOOP_HOME=/home/hduser/hadoop-2.2.0
export HIVE_HOME=/home/hduser/hive
export PATH=$HIVE_HOME/bin:$PATH
#Apply the changes
source /etc/profile
#Start Hadoop
start-all.sh
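#Optionally check with jps that the Hadoop daemons (NameNode, DataNode, ResourceManager, NodeManager) are running
jps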
#Create the Hive warehouse paths /tmp and /user/hive/warehouse on HDFS
hadoop fs -mkdir /tmp
hadoop fs -mkdir /user
hadoop fs -mkdir /user/hive
hadoop fs -mkdir /user/hive/warehouse
hadoop fs -chmod g+w /tmp
hadoop fs -chmod g+w /user/hive/warehouse
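#Optionally confirm the warehouse directory and its permissions
hadoop fs -ls /user/hive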
#Create the hive user in MySQL
mysql -uroot -p
create user 'hive'@'node01' identified by 'hadoop';
grant all on hive.* to 'hive'@'node01' identified by 'hadoop';
flush privileges;
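#After exiting the mysql shell, optionally verify that the new account can connect (node01 is the host used in the grant above)
mysql -uhive -phadoop -h node01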
#Edit the configuration file
cd ~/hive/conf
sudo gedit hive-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
  <!-- Add the following properties inside configuration -->
  <property>
    <name>hive.metastore.warehouse.dir</name>
    <value>/user/hive/warehouse</value>
  </property>
  <property>
    <name>hive.metastore.local</name>
    <value>true</value>
  </property>
  <!-- For a remote MySQL database, put the remote IP or hostname here instead of node01 -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://node01:3306/hive?createDatabaseIfNotExist=true</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>hive</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>hadoop</value>
  </property>
</configuration>
#Copy the MySQL JDBC driver into the hive/lib directory
mysql-connector-java-5.1.28.jar
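#For example, if the connector jar is in the current directory (the source path is an assumption):
cp mysql-connector-java-5.1.28.jar ~/hive/lib/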
#Start the MySQL service
sudo /usr/local/mysql/support-files/mysql.server start
#Start the Hive CLI
hive
show databases;
show tables;
#Start the HiveServer2 JDBC service
hiveserver2
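#hiveserver2 runs in the foreground; to keep it running it can instead be started in the background (log path is an assumption)
nohup hiveserver2 > ~/hiveserver2.log 2>&1 &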
#Start the Beeline JDBC client
beeline -u jdbc:hive2://localhost:10000
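#If HiveServer2 rejects the anonymous connection, passing the Hadoop user name may help (hduser is the user assumed elsewhere in this guide)
beeline -u jdbc:hive2://localhost:10000 -n hduser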
#Create a Hive table
hive> CREATE TABLE pokes (foo INT, bar STRING);
#List tables
hive> show tables;
#View the table definition
hive> show create table pokes;
#Load data from a local file
LOAD DATA LOCAL INPATH "/home/hduser/hive/examples/files/kv1.txt" OVERWRITE INTO TABLE pokes;
select * from pokes;
#Create a partitioned table
CREATE TABLE invites (foo INT, bar STRING) PARTITIONED BY (ds STRING);
LOAD DATA LOCAL INPATH "/home/hduser/hive/examples/files/kv2.txt" OVERWRITE INTO TABLE invites PARTITION (ds='2008-08-15');
LOAD DATA LOCAL INPATH "/home/hduser/hive/examples/files/kv3.txt" OVERWRITE INTO TABLE invites PARTITION (ds='2008-08-08');
#List partitions
show partitions invites;
#Export data to a local directory
INSERT OVERWRITE LOCAL DIRECTORY '/tmp/local_out2' SELECT a.* FROM invites a WHERE a.ds='2008-08-15';
#View the exported data
cat /tmp/local_out2/*