hive的DDL语法
根据链接https://blog.csdn.net/helloHbulie/article/details/115376657敲代码,加深自己的记忆
对数据库的操作
创建数据库
create database if not exists myhive;
创建数据库并指定hdfs存储位置
create database myhive2 location '/myhive2';
修改数据库
alter database myhive2 set dbproperties('createtime' = '20230302');
查看数据库详细信息
查看数据库基本信息
desc database myhive;
查看数据库详细信息
desc database extended myhive2;
删除数据库
删除一个空数据库
drop database myhive2;
强制删除数据库,包含数据库下面的表一起删除
drop database myhive cascade;
对数据表的操作
建内部表
use myhive;
create table stu(id int,name string);
insert into stu values (1,"zhangsan");
-- Multi-row insert (fixed: 'inti' -> 'into'; closed the unterminated "zhangsan string)
insert into stu values (1,"zhangsan"),(2,"lisi");
select id,name from stu;
创建表并指定字段之间的分隔符
create table if not exists stu2(id int,name string) row format delimited fields terminated by '\t' stored as textfile location '/user/stu2';
根据查询结果创建表
create table stu3 as select * from stu2;
根据已经存在的表结构创建表
create table stu4 like stu2;
查询表结构
只查询表内字段及属性
desc stu2;
详细查询
desc formatted stu2;
查询创建表的语句
show create table stu2;
构建外部表
create external table student (s_id string,s_name string) row format delimited fields terminated by '\t';
从本地文件系统向表中加载数据
追加操作
load data local inpath '/export/servers/hivedatas/student.csv' into table student;
覆盖操作
load data local inpath '/export/servers/hivedatas/student.csv' overwrite into table student;
从hdfs文件系统向表中加载数据
-- Load from HDFS (no 'local'); fixed: removed the leading space inside the path
-- literal, which would have made Hive resolve a path starting with ' '.
load data inpath '/hivedatas/techer.csv' into table techer;
加载数据到指定分区
load data inpath '/hivedatas/techer.csv' into table techer partition(cur_date=20230302);
创建分区表
create table score(s_id string,s_score int) partitioned by (month string);
创建一个表带多个分区
create table score2(s_id string,s_score int) partitioned by (year string,month string,day string);
加载数据到一个分区的表中
load data local inpath '/export/servers/hivedatas/score.csv' into table score partition (month='202303');
加载数据到一个多分区的表中
-- Load into a multi-level partition; fixed: the path was wrapped in doubled
-- quotes (''…''), which Hive parses as an empty string plus a bare token.
load data local inpath '/export/servers/hivedatas/score.csv' into table score2 partition(year='2023',month='03',day='02');
查看分区
show partitions score;
添加一个分区
alter table score add partition(month='202303');
同时添加多个分区
alter table score add partition(month='202303') partition(month='202304');
删除分区
alter table score drop partition(month='202303');
开启hive的桶表功能
set hive.enforce.bucketing=true;
设置reduce的个数
set mapreduce.job.reduces=3;
创建桶表
create table course (c_id string,c_name string) clustered by(c_id) into 3 buckets;
通过insert overwrite给桶表中加载数据
insert overwrite table course select * from course_common cluster by(c_id); --最后指定桶字段
修改表名称
alter table old_table_name rename to new_table_name;
增加/修改列信息
查询表结构
desc score5;
添加列
alter table score5 add columns (mycol string,mysco string);
更新列
alter table score5 change column mysco mysconew int;
删除表操作
drop table score5;
清空表操作
truncate table score6;
直接向分区表中插入数据
-- Direct insert into a partition; fixed: 'score' is created above with two
-- columns (s_id string, s_score int) + month partition, but three values were
-- supplied. s_score is int, so the value is unquoted.
insert into table score partition(month='202303') values ('001',100);
通过load方式加载数据
load data local inpath '/export/servers/hivedatas/score.csv' overwrite into table score partition(month='202303');
通过查询方式加载数据
-- NOTE(review): 'score2' as created above has columns (s_id, s_score) and
-- partitions (year, month, day), but this statement selects c_id and names a
-- single 'month' partition — presumably it targets a differently-defined
-- score2; verify the intended schema before running.
insert overwrite table score2 partition(month='202303') select s_id,c_id,s_score from score1;
查询语句中创建表并加数据
create table score2 as select * from score1;
在创建表时通过location指定加载数据的路径
create external table score6 (s_id string,c_id string,s_score int) row format delimited fields terminated by ',' location '/myscore';
export导出与import导入hive表数据(内部表操作)
create table techer2 like techer;
export table techer to '/export/techer';
import table techer2 from '/export/techer';
insert导出
将查询结果导出到本地
insert overwrite local directory '/export/servers/exporthive' select * from score;
将查询结果格式化导出到本地
-- Formatted export to the local filesystem; fixed: missing space in '*from'
-- (parse error). NOTE(review): collection separator '/#' differs from the '#'
-- used in the HDFS export below — confirm which one is intended.
insert overwrite local directory '/export/servers/exporthive' row format delimited fields terminated by '\t' collection items terminated by '/#' select * from student;
将查询结果导出到hdfs上(没有local)
insert overwrite directory '/export/servers/exporthive' row format delimited fields terminated by '\t' collection items terminated by '#' select * from score;
hadoop命令导出到本地
dfs -get /export/servers/exporthive/000000_0 /export/servers/exporthive/local.txt;
hive shell命令导出
基本语法:(hive -f /-e 脚本或者执行语句 > file)
hive -e "select * from myhive.score;" > /export/servers/exporthive/score.txt
hive -f export.sh > /export/servers/exporthive/score.txt
export导出到HDFS上
export table score to '/export/exporthive/score';