目录
hive配置
前置条件:jdk,hadoop, mysql都要安装完成并启动
[root@kb131 install]# tar -zxf /opt/install/apache-hive-3.1.2-bin.tar.gz -C /opt/soft/
[root@kb131 soft]# mv /opt/soft/apache-hive-3.1.2-bin/ /opt/soft/hive312
[root@kb131 soft]# touch /opt/soft/hive312/conf/hive-site.xml
[root@kb131 soft]# vim /opt/soft/hive312/conf/hive-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<!-- HDFS root directory for Hive managed-table data -->
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/hive312/warehouse</value>
</property>
<!-- Backing database type for the Hive metastore -->
<property>
<name>hive.metastore.db.type</name>
<value>mysql</value>
</property>
<!-- JDBC URL of the metastore DB; createDatabaseIfNotExist auto-creates schema "kb131".
     NOTE(review): IP 192.168.78.141 differs from 192.168.153.141 used by the beeline
     command later in these notes - confirm which address is the actual host. -->
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://192.168.78.141:3306/kb131?createDatabaseIfNotExist=true</value>
</property>
<!-- Connector/J 8.x driver class (matches mysql-connector-java-8.0.29.jar copied below) -->
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.cj.jdbc.Driver</value>
</property>
<!-- Metastore DB credentials -->
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>root</value>
</property>
<!-- Disable strict schema-version check between Hive binaries and metastore schema -->
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
</property>
<!-- CLI conveniences: show current database in the prompt ... -->
<property>
<name>hive.cli.print.current.db</name>
<value>true</value>
</property>
<!-- ... and print column headers in query results -->
<property>
<name>hive.cli.print.header</name>
<value>true</value>
</property>
</configuration>
[root@kb131 conf]# cp /opt/install/mysql-connector-java-8.0.29.jar /opt/soft/hive312/lib/
[root@kb131 lib]# rm -f /opt/soft/hive312/lib/guava-19.0.jar
[root@kb131 hadoop313]# cp /opt/soft/hadoop313/share/hadoop/common/lib/guava-27.0-jre.jar /opt/soft/hive312/lib/
[root@kb131 hive312]# vim /etc/profile
#HIVE
export HIVE_HOME=/opt/soft/hive312
export PATH=$HIVE_HOME/bin:$PATH
[root@kb131 hive312]# source /etc/profile
[root@kb131 hive312]# schematool -dbType mysql -initSchema
[root@kb131 hive312]# hive
hive (default)> show databases;
hive (default)> show tables;
hive启动
启动元数据服务
[root@kb131 hive312]# nohup hive --service metastore &
启动hiveserver2服务
[root@kb131 hive312]# nohup hive --service hiveserver2 &
通过beeline方式连接hive
[root@kb131 hive312]# beeline -u jdbc:hive2://192.168.153.141:10000 -n root
(注意:此处 IP 192.168.153.141 与 hive-site.xml 中配置的 192.168.78.141 网段不一致,请以本机实际 IP 为准)
hive数据库操作
create database if not exists kb22diao; 创建数据库
drop database if exists kb22diao; 删除数据库
drop database kb22diao cascade; 强制删库
show databases; 查看所有数据库
use kb22; 使用选中数据库
select current_database(); 查看当前使用的数据库
describe database kb22; 查看库详情
desc database kb22; 查看库详情
create table if not exists demo1(id int, name string); 创建表
desc demo1; 查看表字段
desc formatted demo1;
show create table demo1; 查看创建表详情
show tables; 查看当前库中所有表
insert into demo1 values(1,"zhangsan"); 插入数据
insert into demo1 values(2,"lisi");
select * from demo1;
insert overwrite table demo1 values(3,"wangwu"); 重写,将原表覆盖
alter table demo1 rename to demo1b; 修改表名
alter table demo1b change name uname string; 修改列名
alter table demo1b add columns(age int, address string, email string);
hive建表语句
[root@kb131 ~]# vim ./student.txt
id name likes address(此行仅为字段说明,不要写入实际的 student.txt,否则 load 时会被当作一行脏数据加载)
1,小明1,lol-book-movie,beijing:bdqn-nanjing:zhongbo
2,小明2,lol-book-movie,beijing:bdqn-nanjing:zhongbo
3,小明3,lol-book-movie,beijing:bdqn-nanjing:zhongbo
4,小明4,lol-book-movie,beijing:bdqn-nanjing:zhongbo
5,小明5,lol-movie,beijing:bdqn-nanjing:zhongbo
6,小明6,book-movie,beijing:bdqn-nanjing:zhongbo
7,小明7,lol-book,beijing:bdqn-nanjing:zhongbo
8,小明8,lol-book,beijing:bdqn-nanjing:zhongbo
9,小明9,lol-book-movie,beijing:bdqn-nanjing:zhongbo
本机 /root/student.txt
hdfs /hivestu/student.txt
内部表
也叫托管表,是Hive在创建表时的默认表。
特点:内部表被删除后,表的元数据(保存在 metastore,即前面配置的 MySQL 库中)和 HDFS 上的表数据都会被完全删除
外部表
特点:表中的数据在删除后仍然在HDFS中。
如果我们创建一个外部表,在删除表之后,只有与表相关的元数据被删除,而不会删除表的内容。
声明外部表的关键字:external
建表语句(内部表)
-- Managed (internal) table: DROP TABLE removes both the metastore entry
-- and the data files under the warehouse directory.
create table student(
id int,
name string,
likes array<string>,
address map<string, string>
)
row format delimited fields terminated by ','
collection items terminated by '-'
map keys terminated by ':'
lines terminated by '\n';
-- Matches rows like: 1,小明1,lol-book-movie,beijing:bdqn-nanjing:zhongbo
-- fields split on ',' ; array/map entries split on '-' ; map key:value split on ':'
加载数据
load data inpath '/hivestu/student.txt' into table student;
load data local inpath '/root/student.txt' into table student; 不推荐
drop table student; 数据和元数据一并被删除(内部表特性)
建表语句(外部表)
-- External table: DROP TABLE removes only the metastore entry; the files
-- under 'location' remain on HDFS.
create external table student_external(
id int,
name string,
likes array<string>,
address map<string, string>
)
row format delimited fields terminated by ','
collection items terminated by '-'
map keys terminated by ':'
lines terminated by '\n'
location '/hivestu';
select * from student_external;
drop table student_external;