6.1.4 DWS层加载数据脚本

1)在hadoop102的/home/atguigu/bin目录下创建脚本
[atguigu@hadoop102 bin]$ vim dws_uv_log.sh
在脚本中编写如下内容
#!/bin/bash
# dws_uv_log.sh — load the DWS-layer active-device detail tables (day/week/month)
# from the DWD start log.
# Usage: dws_uv_log.sh [yyyy-MM-dd]   (defaults to yesterday when no date is given)

# Variables kept at the top so the target database and hive binary are easy to change.
APP=gmall
hive=/opt/module/hive/bin/hive

# Use the argument as the business date if given; otherwise default to yesterday.
if [ -n "$1" ]; then
    do_date=$1
else
    do_date=$(date -d "-1 day" +%F)
fi

sql="
set hive.exec.dynamic.partition.mode=nonstrict;

-- Daily active devices: one row per device (mid_id); multi-valued fields
-- are collapsed into a single '|'-separated string.
insert overwrite table ${APP}.dws_uv_detail_day partition(dt='$do_date')
select
    mid_id,
    concat_ws('|', collect_set(user_id)) user_id,
    concat_ws('|', collect_set(version_code)) version_code,
    concat_ws('|', collect_set(version_name)) version_name,
    concat_ws('|', collect_set(lang)) lang,
    concat_ws('|', collect_set(source)) source,
    concat_ws('|', collect_set(os)) os,
    concat_ws('|', collect_set(area)) area,
    concat_ws('|', collect_set(model)) model,
    concat_ws('|', collect_set(brand)) brand,
    concat_ws('|', collect_set(sdk_version)) sdk_version,
    concat_ws('|', collect_set(gmail)) gmail,
    concat_ws('|', collect_set(height_width)) height_width,
    concat_ws('|', collect_set(app_time)) app_time,
    concat_ws('|', collect_set(network)) network,
    concat_ws('|', collect_set(lng)) lng,
    concat_ws('|', collect_set(lat)) lat
from ${APP}.dwd_start_log
where dt='$do_date'
group by mid_id;

-- Weekly active devices: the week containing do_date, Monday through Sunday.
-- next_day(d,'MO') is the NEXT Monday after d, so -7/-1 give this week's bounds.
insert overwrite table ${APP}.dws_uv_detail_wk partition(wk_dt)
select
    mid_id,
    concat_ws('|', collect_set(user_id)) user_id,
    concat_ws('|', collect_set(version_code)) version_code,
    concat_ws('|', collect_set(version_name)) version_name,
    concat_ws('|', collect_set(lang)) lang,
    concat_ws('|', collect_set(source)) source,
    concat_ws('|', collect_set(os)) os,
    concat_ws('|', collect_set(area)) area,
    concat_ws('|', collect_set(model)) model,
    concat_ws('|', collect_set(brand)) brand,
    concat_ws('|', collect_set(sdk_version)) sdk_version,
    concat_ws('|', collect_set(gmail)) gmail,
    concat_ws('|', collect_set(height_width)) height_width,
    concat_ws('|', collect_set(app_time)) app_time,
    concat_ws('|', collect_set(network)) network,
    concat_ws('|', collect_set(lng)) lng,
    concat_ws('|', collect_set(lat)) lat,
    date_add(next_day('$do_date','MO'),-7) monday_date,
    date_add(next_day('$do_date','MO'),-1) sunday_date,
    concat(date_add(next_day('$do_date','MO'),-7),'_',date_add(next_day('$do_date','MO'),-1)) wk_dt
from ${APP}.dws_uv_detail_day
where dt>=date_add(next_day('$do_date','MO'),-7)
  and dt<=date_add(next_day('$do_date','MO'),-1)
group by mid_id;

-- Monthly active devices: all days of the month containing do_date,
-- partitioned dynamically by mn = 'yyyy-MM'.
insert overwrite table ${APP}.dws_uv_detail_mn partition(mn)
select
    mid_id,
    concat_ws('|', collect_set(user_id)) user_id,
    concat_ws('|', collect_set(version_code)) version_code,
    concat_ws('|', collect_set(version_name)) version_name,
    concat_ws('|', collect_set(lang)) lang,
    concat_ws('|', collect_set(source)) source,
    concat_ws('|', collect_set(os)) os,
    concat_ws('|', collect_set(area)) area,
    concat_ws('|', collect_set(model)) model,
    concat_ws('|', collect_set(brand)) brand,
    concat_ws('|', collect_set(sdk_version)) sdk_version,
    concat_ws('|', collect_set(gmail)) gmail,
    concat_ws('|', collect_set(height_width)) height_width,
    concat_ws('|', collect_set(app_time)) app_time,
    concat_ws('|', collect_set(network)) network,
    concat_ws('|', collect_set(lng)) lng,
    concat_ws('|', collect_set(lat)) lat,
    date_format('$do_date','yyyy-MM') mn
from ${APP}.dws_uv_detail_day
where date_format(dt,'yyyy-MM')=date_format('$do_date','yyyy-MM')
group by mid_id;
"

$hive -e "$sql"
2)增加脚本执行权限
[atguigu@hadoop102 bin]$ chmod 777 dws_uv_log.sh
3)脚本使用
[atguigu@hadoop102 module]$ dws_uv_log.sh 2019-02-11
4)查询结果
hive (gmall)> select count(*) from dws_uv_detail_day where dt='2019-02-11';
hive (gmall)> select count(*) from dws_uv_detail_wk;
hive (gmall)> select count(*) from dws_uv_detail_mn ;
5)脚本执行时间
企业开发中一般在每日凌晨0点30分~1点执行

一、课程简介随着技术的飞速发展,经过多年的数据积累,各互联网公司已保存了海量的原始数据和各种业务数据,所以数据仓库技术是各大公司目前都需要着重发展投入的技术领域。数据仓库是面向分析的集成化数据环境,为企业所有决策制定过程,提供系统数据支持的战略集合。通过对数据仓库中数据的分析,可以帮助企业改进业务流程、控制成本、提高产品质量等。二、课程内容本次精心打造的数仓项目的课程,从项目架构的搭建,到数据采集模块的设计、数仓架构的设计、实战需求实现、即席查询的实现,我们针对国内目前广泛使用的Apache原生框架和CDH版本框架进行了分别介绍,Apache原生框架介绍中涉及到的技术框架包括Flume、Kafka、Sqoop、MySql、HDFS、Hive、Tez、Spark、Presto、Druid等,CDH版本框架讲解包括CM的安装部署、Hadoop、Zookeeper、Hive、Flume、Kafka、Oozie、Impala、HUE、Kudu、Spark的安装配置,透彻了解不同版本框架的区别联系,将大数据全生态系统前沿技术一网打尽。在过程中对大数据生态体系进行了系统的讲解,对实际企业数仓项目中可能涉及到的技术点都进行了深入的讲解和探讨。同时穿插了大量数仓基础理论知识,让你在掌握实战经验的同时能够打下坚实的理论基础。三、课程目标本课程以国内电商巨头实际业务应用场景为依托,对电商数仓的常见实战指标以及难点实战指标进行了详尽讲解,具体指标包括:每日、周、月活跃设备明细,留存用户比例,沉默用户、回流用户、流失用户统计,最近连续3周活跃用户统计,最近7天内连续3天活跃用户统计,GMV成交总额分析,转化率及漏斗分析,品牌复购率分析、订单表拉链表的设计等,让学生拥有更直观全面的实战经验。通过对本课程的学习,对数仓项目可以建立起清晰明确的概念,系统全面的掌握各项数仓项目技术,轻松应对各种数仓难题。四、课程亮点本课程结合国内多家企业实际项目经验,特别加入了项目架构模块,从集群规模的确定到框架版本选型以及服务器选型,手把手教你从零开始搭建大数据集群。并且总结大量项目实战中会遇到的问题,针对各个技术框架,均有调优实战经验,具体包括:常用Linux运维命令、Hadoop集群调优、Flume组件选型及性能优化、Kafka集群规模确认及关键参数调优。通过这部分学习,助学生迅速成长,获取前沿技术经验,从容解决实战问题。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值