-- Session settings: write the table below as Snappy-compressed Parquet
-- via dynamic partition inserts.
set hive.exec.dynamic.partition.mode=nonstrict; -- allow fully dynamic partition inserts (no static partition key required)
set hive.exec.compress.output=true; -- compress Hive query results written to storage
set mapreduce.map.output.compress=true; -- compress intermediate (shuffle) map output
set mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.SnappyCodec; -- Snappy for map output; modern key (the old mapred.map.output.compress.codec name is deprecated)
set mapreduce.output.fileoutputformat.compress=true; -- compress the FINAL job output (default false)
set mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.SnappyCodec; -- Snappy codec for the final job output
set mapreduce.output.fileoutputformat.compress.type=BLOCK; -- block-level compression for the final output
set parquet.compression=SNAPPY; -- Parquet file compression codec
-- Recreate the collected-accounts accounting summary table in dwd_fa,
-- partitioned by source system (orisys), stored as Snappy-compressed Parquet.
-- FIX: the original DROP targeted dwd_fahs.* while CREATE targeted DWD_FA.*,
-- so the old table was never dropped and a rerun would fail with
-- "table already exists"; both statements now use the same database.
-- The "row format delimited" clause was removed: Parquet is a self-describing
-- binary format and field delimiters do not apply to it.
drop table if exists dwd_fa.collect_accounts_accounting_su;
create table dwd_fa.collect_accounts_accounting_su
(
    ann                string comment '年份',
    subj_code          string comment '科目编号',
    collacco_code      string comment '套账编号',
    subj_name          string comment '科目名称',
    subj_level_code    string comment '科目级别代码',
    supr_subj_code     string comment '上级科目编号',
    subj_type_code     string comment '科目类别代码',
    subj_prop_code     string comment '科目性质代码',
    subj_prop_desc     string comment '科目性质描述',
    curr_code          string comment '币种代码',
    balc_dir_code      string comment '余额方向代码',
    subj_lvl           string comment '科目层级',
    detail_subj_sign   string comment '明细科目标志',
    check_sign         string comment '核算标志',
    carryover_sign     string comment '结转标志',
    batch_time         string comment '跑批起始时间',
    created_by         string comment '操作人',
    ndc_created_time   string comment '数据创建时间',
    ndc_updated_time   string comment '数据更新时间'
) partitioned by (orisys string)
stored as parquet
location '/user/hive/warehouse/dwd_fa.db/collect_accounts_accounting_su'
tblproperties ("parquet.compression" = "snappy");
-- Reference note: Hive configuration for creating compressed tables.
-- (Source-article metadata: latest recommended revision published 2023-05-30 10:16:36.)