Hive notes: loading TXT/CSV data and querying/writing HBase

This post collects Hive commands for working with CSV and HBase data, covering data loading, delimiter settings, and table property configuration. It also shows how to inspect data in Hadoop HDFS and how to query and analyze data in HBase, including aggregation, ranking, and inserting/updating data.

//TXT

// row format delimited fields terminated by ','

// location '/app/data2/train'

// tblproperties(

// "skip.header.line.count"="1"

// )

// row format delimited fields terminated by "\t"

// location '/app/data1/events/products'
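The lines above are only the key clauses and need a full CREATE TABLE around them. A minimal sketch for the comma-delimited case, with a made-up table name and column list (only the format, location, and header-skip settings come from the notes above):

-- hypothetical table name and columns; format/location/tblproperties taken from the notes above
create external table ext_train(
    user_id string,
    event_id string,
    invited string
)
row format delimited fields terminated by ','
location '/app/data2/train'
tblproperties("skip.header.line.count"="1");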

//CSV

// row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'

// WITH SERDEPROPERTIES(

// 'separatorChar'=',',

// 'quoteChar'='\"',

// 'escapeChar'='\\'

// )

// location '/app/data5/exam'

// tblproperties(

// "skip.header.line.count"="1"

// )
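Same idea for the OpenCSVSerde variant; a sketch with a placeholder table name and columns (only the SerDe properties, location, and header-skip come from the notes above):

-- placeholder table name and columns; SerDe/location/header-skip taken from the notes above
create external table ex_exam_csv(
    id string,
    name string,
    price string
)
row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
with serdeproperties(
    'separatorChar' = ',',
    'quoteChar' = '"',
    'escapeChar' = '\\'
)
location '/app/data5/exam'
tblproperties("skip.header.line.count"="1");

Note that OpenCSVSerde exposes every column as string, so numeric fields need a cast when queried.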

//HBase

// STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'

// WITH SERDEPROPERTIES ("hbase.columns.mapping" =":key,result:sales,result:praise")

// TBLPROPERTIES( "hbase.table.name" = "exam5:spu")
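These HBase clauses also belong inside a full DDL. A sketch consistent with the insert into spu_db.ex_spu_hbase further down; the Hive column names and types are assumptions inferred from that insert and from the mapping string:

-- column names/types inferred from the mapping and the later insert, not from the original DDL
create external table spu_db.ex_spu_hbase(
    key string,
    sales double,
    praise int
)
stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
with serdeproperties("hbase.columns.mapping" = ":key,result:sales,result:praise")
tblproperties("hbase.table.name" = "exam5:spu");

With EXTERNAL, the HBase table exam5:spu must already exist (for example created in the HBase shell); a non-external table would make Hive create it.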

//Checking data in Hadoop (HDFS)

// hdfs dfs -cat /app/data/exam/meituan_waimai_meishi.csv | wc -l
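Besides counting lines, it helps to peek at the first rows to confirm the header before relying on skip.header.line.count; a small sketch on the same file:

# check the header and first rows of the CSV in HDFS
hdfs dfs -cat /app/data/exam/meituan_waimai_meishi.csv | head -5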

//Querying and analyzing data in HBase

//with t1 as (
//    select *, concat(createMonth, serviceReasonDetail) as rowkey
//    from exam6.ex_exam_after_sales_service
//)
//insert into exam6.ex_exam_after_sales_service_statistics
//select rowkey, count(serviceReasonDetail) as serviceReasonDetailCount
//from t1
//group by rowkey

//with t1 as (
//    select continent, recordDate, countryName, confirmedIncr,
//           row_number() over(partition by continent, recordDate
//                             order by cast(confirmedIncr as int) desc) as rank
//    from exam4.ex_exam_record er
//)
//insert overwrite table exam4.ex_exam_covid19_record
//select concat(continent, recordDate) as key, countryName, confirmedIncr
//from t1
//where t1.rank = 1

//with t1 as (
//    select *, (spu_price * month_sales) as sales
//    from spu_db.ex_spu
//)
//insert overwrite table spu_db.ex_spu_hbase
//select concat(shop_id, shop_name) as key, sum(sales) as sales, sum(praise_num) as praise
//from t1
//group by shop_id, shop_name

// scan 'exam6:after_sales_service',{LIMIT=>10}
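The scan above is an HBase shell command, not Hive. To inspect one row instead of the first ten, a get on a concrete rowkey works the same way (the rowkey below is a placeholder):

# HBase shell; 'some_rowkey' is a placeholder for a real rowkey
get 'exam6:after_sales_service', 'some_rowkey'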

//Partitioning

create table exam3.stubehavior_partitioned(
    stu_id string,
    kill_id string,
    stage string
)
partitioned by (course string)
row format delimited fields terminated by ',';

set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;

insert into exam3.stubehavior_partitioned partition(course)
select stu_id, kill_id, stage, course_id as course
from exam3.stubehavior;
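A quick check that the dynamic-partition insert worked: list the partitions that were generated and sample a few rows.

-- verify the partitions created by the insert above
show partitions exam3.stubehavior_partitioned;
select * from exam3.stubehavior_partitioned limit 10;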
