Exam study notes

Start the services:
hadoop      start-all.sh
zookeeper   zkServer.sh start
hive2       hive --service hiveserver2
            hive --service metastore
hbase       start-hbase.sh ; hbase shell
zeppelin    zeppelin-daemon.sh start

%sh
hdfs dfs -rm -r /app                                           # clean up any previous run
hdfs dfs -mkdir -p /app/data/exam
hdfs dfs -put /opt/returned_goods_log_7-9.csv /app/data/exam
hdfs dfs -cat /data../*.csv | wc -l                            # sanity-check the row count


HBase: create namespace and table
create_namespace 'exam'
create 'exam:sales','statistics'      # column family; must match the family used in the Hive mapping below

Hive: external table
create database exam;
create external table exam.sale_service(
    -- column definitions omitted in the original notes
)
-- option 1: simple delimited format
row format delimited fields terminated by ','
-- option 2: OpenCSVSerde for quoted CSV fields (use option 1 or option 2, not both)
row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
with serdeproperties
(
    'separatorChar' = ',',
    'quoteChar' = '\"',
    'escapeChar' = '\\'
)
location '/app/data/exam'
tblproperties('skip.header.line.count'='1')     -- skip the header row
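
Putting it together, a minimal runnable sketch of the external table; the column names (order_id, goods_name, return_date, service_score, praise_count) are assumptions for illustration, not the real columns of returned_goods_log_7-9.csv:

create database if not exists exam;
create external table if not exists exam.sale_service(
    order_id      string,
    goods_name    string,
    return_date   string,
    service_score string,
    praise_count  string
)
row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
with serdeproperties (
    'separatorChar' = ',',
    'quoteChar'     = '\"',
    'escapeChar'    = '\\'
)
location '/app/data/exam'
tblproperties('skip.header.line.count'='1');

Note that OpenCSVSerde exposes every column as string, which is why numeric fields are cast (e.g. cast(... as int)) when they are queried later.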

Hive: map the table onto HBase
create external table exam.hbase_sale_service(
key string,
service int,
praise int        -- one Hive column for each entry in hbase.columns.mapping
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,result:service,result:praise")
TBLPROPERTIES("hbase.table.name" = "exam:sales")


Load the results into the Hive-mapped table (this snippet is from the covid19 exercise, but the pattern is the same)
with
t1 as (select continent, countryName, recordDate, confirmedIncr,
              row_number() over (partition by recordDate order by cast(confirmedIncr as int) desc) rn
       from ex_exam_record),
t2 as (select continent, countryName, recordDate, confirmedIncr
       from t1 where rn = 1 order by recordDate)
insert into ex_exam_covid19_record
select concat(continent, recordDate) key, countryName maxIncreaseCountry, confirmedIncr maxIncreaseCount
from t2
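
Applied to the sales tables defined above, the same insert ... select pattern would look roughly like this (a sketch only; order_id, service_score and praise_count are the assumed column names from the DDL sketch earlier):

insert into exam.hbase_sale_service
select order_id,                        -- becomes the HBase row key (:key)
       cast(service_score as int),      -- written to result:service
       cast(praise_count as int)        -- written to result:praise
from exam.sale_service;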

HBase: check the result
scan 'exam:sales', {LIMIT => 10}
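
The same rows can also be sanity-checked from the Hive side through the mapped table:

select * from exam.hbase_sale_service limit 10;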
