The alter table add partition ... location ... syntax


Table creation statement

create table `wxtest`(
`groupId` string,
`uuid` string,
`userId` string,
`dt_index` string)
partitioned by(
`dt` string,
`projId` string)
row format delimited
fields terminated by '\t';
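
To see where this table's data is stored by default (the "original HDFS path" referred to below), describe formatted prints the table's location. A minimal check, assuming a standard warehouse directory; the exact path depends on the cluster configuration:

describe formatted wxtest;
-- Look at the "Location:" field in the output, e.g. (illustrative path only):
-- Location: hdfs://namenode:8020/user/hive/warehouse/wxtest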

Add two partitions by inserting data:

insert into table wxtest partition(dt='20190621' , projId='111') values('a1','b1','c1','d1');
insert into table wxtest partition(dt='20190622' , projId='112') values('a2','b2','c2','d2');

The query results are as follows:

hive> select * from wxtest;
OK
a1      b1      c1      d1      20190621        111
a2      b2      c2      d2      20190622        112

Now test the syntax in question:

alter table wxtest add if not exists partition(dt='test1',projId='test2')
location '/home/hdp_teu_dpd/resultdata/wmda/terra/group/group_user_list/test1/test2';

Then insert data into the new partition:

insert into table wxtest partition(dt='test1' , projId='test2') values('a3','b3','c3','d3');

Query the Hive table again; the data for the new partition (dt='test1', projId='test2') is visible:

hive> select * from wxtest;
OK
a1      b1      c1      d1      20190621        111
a2      b2      c2      d2      20190622        112
a3      b3      c3      d3      test1   test2

However, under the Hive table's HDFS storage path there is no directory for this partition; only the data of the two original partitions is there.

Check the path given after LOCATION in the ALTER TABLE statement: the new partition's data was written there instead.
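
You can confirm where each partition's data lives by asking the metastore directly. A minimal sketch; the hdfs dfs listing is only illustrative:

describe formatted wxtest partition(dt='test1', projId='test2');
-- The "Location:" field shows the custom path from the ALTER TABLE statement:
-- /home/hdp_teu_dpd/resultdata/wmda/terra/group/group_user_list/test1/test2

-- The same directory can also be listed from the shell:
-- hdfs dfs -ls /home/hdp_teu_dpd/resultdata/wmda/terra/group/group_user_list/test1/test2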

Then add another new partition to the Hive table:

insert into table wxtest partition(dt='20190619' , projId='113') values('a4','b4','c4','d4');

This partition's data still lands under the Hive table's original path.
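
The same check on this ordinary partition shows it under the table's default directory. A sketch; the warehouse path and the lower-cased partition directory names are assumptions:

describe formatted wxtest partition(dt='20190619', projId='113');
-- Location: hdfs://namenode:8020/user/hive/warehouse/wxtest/dt=20190619/projid=113  (assumed default path)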

 


Conclusion:

alter table ... add if not exists partition(...) location '...'

This syntax only relocates the one new partition: its data does not live under the Hive table's original HDFS path. It does not affect partitions added later, which are still stored under the original path.
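
The typical use of this syntax is to expose data that some other job has already written to HDFS as a partition, without copying it into the warehouse directory. A hedged sketch; the table ext_logs and the landing path are hypothetical:

-- Hypothetical example: mount pre-existing files as a partition of an external table.
create external table ext_logs(
`line` string)
partitioned by(
`dt` string)
row format delimited
fields terminated by '\t';

-- Point the dt='20190701' partition at a directory another job already wrote to;
-- no data is moved, only the metastore entry is created.
alter table ext_logs add if not exists partition(dt='20190701')
location '/data/landing/logs/20190701';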

 

 
