Real-Time Data Warehouse: Table Creation Scripts for Building a Real-Time Data Warehouse with Flink 1.11 SQL

MySQL source database table definitions:

 

-- Order table (order_info)
CREATE TABLE `order_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `consignee` varchar(100) DEFAULT NULL COMMENT 'consignee name',
  `consignee_tel` varchar(20) DEFAULT NULL COMMENT 'consignee phone',
  `total_amount` decimal(10,2) DEFAULT NULL COMMENT 'total amount',
  `order_status` varchar(20) DEFAULT NULL COMMENT 'order status',
  `user_id` bigint(20) DEFAULT NULL COMMENT 'user id',
  `payment_way` varchar(20) DEFAULT NULL COMMENT 'payment method',
  `delivery_address` varchar(1000) DEFAULT NULL COMMENT 'delivery address',
  `order_comment` varchar(200) DEFAULT NULL COMMENT 'order remarks',
  `out_trade_no` varchar(50) DEFAULT NULL COMMENT 'trade number (for third-party payment)',
  `trade_body` varchar(200) DEFAULT NULL COMMENT 'order description (for third-party payment)',
  `create_time` datetime DEFAULT NULL COMMENT 'creation time',
  `operate_time` datetime DEFAULT NULL COMMENT 'operation time',
  `expire_time` datetime DEFAULT NULL COMMENT 'expiration time',
  `tracking_no` varchar(100) DEFAULT NULL COMMENT 'logistics tracking number',
  `parent_order_id` bigint(20) DEFAULT NULL COMMENT 'parent order id',
  `img_url` varchar(200) DEFAULT NULL COMMENT 'image URL',
  `province_id` int(20) DEFAULT NULL COMMENT 'region',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='order table';

-- Order detail table (order_detail)
CREATE TABLE `order_detail` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `order_id` bigint(20) DEFAULT NULL COMMENT 'order id',
  `sku_id` bigint(20) DEFAULT NULL COMMENT 'sku_id',
  `sku_name` varchar(200) DEFAULT NULL COMMENT 'sku name (denormalized)',
  `img_url` varchar(200) DEFAULT NULL COMMENT 'image name (denormalized)',
  `order_price` decimal(10,2) DEFAULT NULL COMMENT 'purchase price (sku price at order time)',
  `sku_num` varchar(200) DEFAULT NULL COMMENT 'purchase quantity',
  `create_time` datetime DEFAULT NULL COMMENT 'creation time',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='order detail table';

-- Product table (sku_info)
CREATE TABLE `sku_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'sku id (item ID)',
  `spu_id` bigint(20) DEFAULT NULL COMMENT 'spu id',
  `price` decimal(10,0) DEFAULT NULL COMMENT 'price',
  `sku_name` varchar(200) DEFAULT NULL COMMENT 'sku name',
  `sku_desc` varchar(2000) DEFAULT NULL COMMENT 'product spec description',
  `weight` decimal(10,2) DEFAULT NULL COMMENT 'weight',
  `tm_id` bigint(20) DEFAULT NULL COMMENT 'brand (denormalized)',
  `category3_id` bigint(20) DEFAULT NULL COMMENT 'level-3 category id (denormalized)',
  `sku_default_img` varchar(200) DEFAULT NULL COMMENT 'default display image (denormalized)',
  `create_time` datetime DEFAULT NULL COMMENT 'creation time',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='product table';

-- Product level-1 category table (base_category1)
CREATE TABLE `base_category1` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `name` varchar(10) NOT NULL COMMENT 'category name',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='level-1 category table';

-- Product level-2 category table (base_category2)
CREATE TABLE `base_category2` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `name` varchar(200) NOT NULL COMMENT 'level-2 category name',
  `category1_id` bigint(20) DEFAULT NULL COMMENT 'level-1 category id',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='level-2 category table';

-- Product level-3 category table (base_category3)
CREATE TABLE `base_category3` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'ID',
  `name` varchar(200) NOT NULL COMMENT 'level-3 category name',
  `category2_id` bigint(20) DEFAULT NULL COMMENT 'level-2 category id',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='level-3 category table';

-- Province table (base_province)
CREATE TABLE `base_province` (
  `id` int(20) DEFAULT NULL COMMENT 'id',
  `name` varchar(20) DEFAULT NULL COMMENT 'province name',
  `region_id` int(20) DEFAULT NULL COMMENT 'region id',
  `area_code` varchar(20) DEFAULT NULL COMMENT 'administrative area code'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- Region table (base_region)
CREATE TABLE `base_region` (
  `id` int(20) NOT NULL COMMENT 'region id',
  `region_name` varchar(20) DEFAULT NULL COMMENT 'region name',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

Flink table definitions:

-- -------------------------
-- Province table
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_base_province`;
CREATE TABLE `ods_base_province` (
  `id` INT,
  `name` STRING,
  `region_id` INT,
  `area_code` STRING
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_base_province',
  'properties.bootstrap.servers' = 'm1:9092,m2:9092,s1:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);
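
All of these ODS sources consume change records that Canal publishes to Kafka in its JSON format. As a point of reference, an INSERT event for base_province would look roughly like the following (a hand-written illustration, not captured output; Flink's canal-json format relies mainly on the data, old, and type fields):

{
  "data": [
    { "id": "1", "name": "北京", "region_id": "1", "area_code": "110000" }
  ],
  "database": "gmall",
  "table": "base_province",
  "es": 1596096000000,
  "ts": 1596096000123,
  "isDdl": false,
  "type": "INSERT"
}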

-- -------------------------
-- Province table
-- MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_province`;
CREATE TABLE `base_province` (
  `id` INT,
  `name` STRING,
  `region_id` INT,
  `area_code` STRING,
  PRIMARY KEY (id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/dim',
  'table-name' = 'base_province', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);

-- -------------------------
-- Province table
-- MySQL Sink Load Data
-- -------------------------
INSERT INTO base_province
SELECT *
FROM ods_base_province;

-- -------------------------
-- Region table
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_base_region`;
CREATE TABLE `ods_base_region` (
  `id` INT,
  `region_name` STRING
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_base_region',
  'properties.bootstrap.servers' = 'm1:9092,m2:9092,s1:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- -------------------------
-- Region table
-- MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_region`;
CREATE TABLE `base_region` (
  `id` INT,
  `region_name` STRING,
  PRIMARY KEY (id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/dim',
  'table-name' = 'base_region', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);

-- -------------------------
-- Region table
-- MySQL Sink Load Data
-- -------------------------
INSERT INTO base_region
SELECT *
FROM ods_base_region;

-- -------------------------
-- Category level 1
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_base_category1`;
CREATE TABLE `ods_base_category1` (
  `id` BIGINT,
  `name` STRING
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_base_category1',
  'properties.bootstrap.servers' = 'm1:9092,m2:9092,s1:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- -------------------------
-- Category level 1
-- MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_category1`;
CREATE TABLE `base_category1` (
  `id` BIGINT,
  `name` STRING,
  PRIMARY KEY (id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/dim',
  'table-name' = 'base_category1', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);

-- -------------------------
-- Category level 1
-- MySQL Sink Load Data
-- -------------------------
INSERT INTO base_category1
SELECT *
FROM ods_base_category1;

-- -------------------------
-- Category level 2
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_base_category2`;
CREATE TABLE `ods_base_category2` (
  `id` BIGINT,
  `name` STRING,
  `category1_id` BIGINT
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_base_category2',
  'properties.bootstrap.servers' = 'm1:9092,m2:9092,s1:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- -------------------------
-- Category level 2
-- MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_category2`;
CREATE TABLE `base_category2` (
  `id` BIGINT,
  `name` STRING,
  `category1_id` BIGINT,
  PRIMARY KEY (id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/dim',
  'table-name' = 'base_category2', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);

-- -------------------------
-- Category level 2
-- MySQL Sink Load Data
-- -------------------------
INSERT INTO base_category2
SELECT *
FROM ods_base_category2;

-- -------------------------
-- Category level 3
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_base_category3`;
CREATE TABLE `ods_base_category3` (
  `id` BIGINT,
  `name` STRING,
  `category2_id` BIGINT
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_base_category3',
  'properties.bootstrap.servers' = 'm1:9092,m2:9092,s1:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- -------------------------
-- Category level 3
-- MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_category3`;
CREATE TABLE `base_category3` (
  `id` BIGINT,
  `name` STRING,
  `category2_id` BIGINT,
  PRIMARY KEY (id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/dim',
  'table-name' = 'base_category3', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);

-- -------------------------
-- Category level 3
-- MySQL Sink Load Data
-- -------------------------
INSERT INTO base_category3
SELECT *
FROM ods_base_category3;

-- -------------------------
-- Product table
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_sku_info`;
CREATE TABLE `ods_sku_info` (
  `id` BIGINT,
  `spu_id` BIGINT,
  `price` DECIMAL(10,0),
  `sku_name` STRING,
  `sku_desc` STRING,
  `weight` DECIMAL(10,2),
  `tm_id` BIGINT,
  `category3_id` BIGINT,
  `sku_default_img` STRING,
  `create_time` TIMESTAMP(0)
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_sku_info',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- -------------------------
-- Product table
-- MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `sku_info`;
CREATE TABLE `sku_info` (
  `id` BIGINT,
  `spu_id` BIGINT,
  `price` DECIMAL(10,0),
  `sku_name` STRING,
  `sku_desc` STRING,
  `weight` DECIMAL(10,2),
  `tm_id` BIGINT,
  `category3_id` BIGINT,
  `sku_default_img` STRING,
  `create_time` TIMESTAMP(0),
  PRIMARY KEY (id) NOT ENFORCED -- matches the MySQL target table's primary key
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/dim',
  'table-name' = 'sku_info', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);

-- -------------------------
-- Product table
-- MySQL Sink Load Data
-- -------------------------
INSERT INTO sku_info
SELECT *
FROM ods_sku_info;

-- -------------------------
-- Order detail
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_order_detail`;
CREATE TABLE `ods_order_detail` (
  `id` BIGINT,
  `order_id` BIGINT,
  `sku_id` BIGINT,
  `sku_name` STRING,
  `img_url` STRING,
  `order_price` DECIMAL(10,2),
  `sku_num` INT,
  `create_time` TIMESTAMP(0)
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_order_detail',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- -------------------------
-- Order info
-- Kafka Source
-- -------------------------
DROP TABLE IF EXISTS `ods_order_info`;
CREATE TABLE `ods_order_info` (
  `id` BIGINT,
  `consignee` STRING,
  `consignee_tel` STRING,
  `total_amount` DECIMAL(10,2),
  `order_status` STRING,
  `user_id` BIGINT,
  `payment_way` STRING,
  `delivery_address` STRING,
  `order_comment` STRING,
  `out_trade_no` STRING,
  `trade_body` STRING,
  `create_time` TIMESTAMP(0),
  `operate_time` TIMESTAMP(0),
  `expire_time` TIMESTAMP(0),
  `tracking_no` STRING,
  `parent_order_id` BIGINT,
  `img_url` STRING,
  `province_id` INT
) WITH (
  'connector' = 'kafka',
  'topic' = 'test_order_info',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'properties.group.id' = 'testGroup',
  'format' = 'canal-json',
  'scan.startup.mode' = 'earliest-offset'
);

-- ---------------------------------
-- DWD layer: paid order detail table dwd_paid_order_detail
-- ---------------------------------
DROP TABLE IF EXISTS dwd_paid_order_detail;
CREATE TABLE dwd_paid_order_detail (
  detail_id BIGINT,
  order_id BIGINT,
  user_id BIGINT,
  province_id INT,
  sku_id BIGINT,
  sku_name STRING,
  sku_num INT,
  order_price DECIMAL(10,0),
  create_time TIMESTAMP(0),
  pay_time TIMESTAMP(0)
) WITH (
  'connector' = 'kafka',
  'topic' = 'dwd_paid_order_detail',
  'scan.startup.mode' = 'earliest-offset',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'format' = 'changelog-json'
);

-- ---------------------------------
-- DWD layer: paid order detail table
-- Load data into dwd_paid_order_detail
-- ---------------------------------
INSERT INTO dwd_paid_order_detail
SELECT
  od.id,
  oi.id order_id,
  oi.user_id,
  oi.province_id,
  od.sku_id,
  od.sku_name,
  od.sku_num,
  od.order_price,
  oi.create_time,
  oi.operate_time -- the time the status changed to paid, used as pay_time
FROM
  (
    SELECT *
    FROM ods_order_info
    WHERE order_status = '2' -- paid
  ) oi
JOIN
  (
    SELECT *
    FROM ods_order_detail
  ) od
ON oi.id = od.order_id;
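
Because a regular stream-stream join like this can update and retract earlier results, the sink topic uses the changelog-json format, which wraps every row with an operation flag. A sketch of one message on dwd_paid_order_detail (illustrative values; this assumes the community changelog-json encoding with data and op fields, where op is one of +I, -U, +U, -D):

{"data": {"detail_id": 1, "order_id": 1001, "user_id": 42, "province_id": 1, "sku_id": 7, "sku_name": "iPhone", "sku_num": 2, "order_price": 4999, "create_time": "2020-08-01 10:00:00", "pay_time": "2020-08-01 10:05:00"}, "op": "+I"}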

 

 

Application (ADS) layer table definitions:

ads_province_index

First, create the corresponding ADS target table in MySQL: ads_province_index.

CREATE TABLE ads.ads_province_index (
  province_id INT(10),
  area_code VARCHAR(100),
  province_name VARCHAR(100),
  region_id INT(10),
  region_name VARCHAR(100),
  order_amount DECIMAL(10,2),
  order_count BIGINT(10),
  dt VARCHAR(100),
  PRIMARY KEY (province_id, dt)
);
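
Since the Flink-side DDL below declares PRIMARY KEY (province_id, dt), the JDBC sink writes to this table in upsert mode; against MySQL that corresponds to statements of roughly this shape (a sketch of what the connector generates, not something to run by hand):

INSERT INTO ads_province_index (province_id, area_code, province_name, region_id, region_name, order_amount, order_count, dt)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
  area_code = VALUES(area_code),
  province_name = VALUES(province_name),
  region_id = VALUES(region_id),
  region_name = VALUES(region_name),
  order_amount = VALUES(order_amount),
  order_count = VALUES(order_count);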

 

Load data into the MySQL ADS-layer target:

-- Run from the Flink SQL CLI
-- ---------------------------------
-- Create the MySQL ADS-layer table via DDL
-- Metrics: 1. daily order count per province
--          2. daily order amount per province
-- ---------------------------------
CREATE TABLE ads_province_index (
  province_id INT,
  area_code STRING,
  province_name STRING,
  region_id INT,
  region_name STRING,
  order_amount DECIMAL(10,2),
  order_count BIGINT,
  dt STRING,
  PRIMARY KEY (province_id, dt) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/ads',
  'table-name' = 'ads_province_index',
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root'
);

-- ---------------------------------
-- dwd_paid_order_detail: paid order detail wide table
-- ---------------------------------
CREATE TABLE dwd_paid_order_detail (
  detail_id BIGINT,
  order_id BIGINT,
  user_id BIGINT,
  province_id INT,
  sku_id BIGINT,
  sku_name STRING,
  sku_num INT,
  order_price DECIMAL(10,2),
  create_time STRING,
  pay_time STRING
) WITH (
  'connector' = 'kafka',
  'topic' = 'dwd_paid_order_detail',
  'scan.startup.mode' = 'earliest-offset',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'format' = 'changelog-json'
);

 

-- ---------------------------------
-- tmp_province_index
-- Temporary order aggregation table
-- ---------------------------------
CREATE TABLE tmp_province_index (
  province_id INT,
  order_count BIGINT,         -- order count
  order_amount DECIMAL(10,2), -- order amount
  pay_date DATE
) WITH (
  'connector' = 'kafka',
  'topic' = 'tmp_province_index',
  'scan.startup.mode' = 'earliest-offset',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'format' = 'changelog-json'
);

-- ---------------------------------
-- tmp_province_index
-- Load data into the temporary aggregation table
-- ---------------------------------
INSERT INTO tmp_province_index
SELECT
  province_id,
  count(distinct order_id) order_count,    -- order count
  sum(order_price * sku_num) order_amount, -- order amount
  DATE_FORMAT(pay_time, 'yyyy-MM-dd') pay_date
FROM dwd_paid_order_detail
GROUP BY province_id, DATE_FORMAT(pay_time, 'yyyy-MM-dd');

-- ---------------------------------
-- tmp_province_index_source
-- Read the temporary aggregation table back as a source
-- (a second table over the same topic is needed so that a
--  processing-time attribute can be declared for the temporal join below)
-- ---------------------------------
CREATE TABLE tmp_province_index_source (
  province_id INT,
  order_count BIGINT,         -- order count
  order_amount DECIMAL(10,2), -- order amount
  pay_date DATE,
  proctime AS PROCTIME()      -- computed column producing a processing-time attribute
) WITH (
  'connector' = 'kafka',
  'topic' = 'tmp_province_index',
  'scan.startup.mode' = 'earliest-offset',
  'properties.bootstrap.servers' = 's1:9092,s2:9092,s3:9092',
  'format' = 'changelog-json'
);

 

-- ---------------------------------
-- DIM layer: province dimension table
-- Create the dimension table source
-- ---------------------------------
DROP TABLE IF EXISTS `dim_province`;
CREATE TABLE dim_province (
  province_id INT,
  province_name STRING,
  area_code STRING,
  region_id INT,
  region_name STRING,
  PRIMARY KEY (province_id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://kms-1:3306/dim',
  'table-name' = 'dim_province',
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = '123qwe',
  'scan.fetch-size' = '100'
);
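
Since dim_province is probed once per incoming row in the temporal join below, it is usually worth enabling the JDBC connector's lookup cache so that not every probe hits MySQL. A variant of the same DDL with caching enabled (cache sizes are illustrative; 'lookup.cache.max-rows' and 'lookup.cache.ttl' are standard Flink 1.11 JDBC connector options):

CREATE TABLE dim_province (
  province_id INT,
  province_name STRING,
  area_code STRING,
  region_id INT,
  region_name STRING,
  PRIMARY KEY (province_id) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://kms-1:3306/dim',
  'table-name' = 'dim_province',
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = '123qwe',
  'scan.fetch-size' = '100',
  'lookup.cache.max-rows' = '5000', -- cache at most 5000 dimension rows
  'lookup.cache.ttl' = '10min'      -- re-read a cached row after 10 minutes
);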

 

-- ---------------------------------
-- Load data into ads_province_index
-- Dimension table (temporal) JOIN
-- ---------------------------------
INSERT INTO ads_province_index
SELECT
  pc.province_id,
  dp.area_code,
  dp.province_name,
  dp.region_id,
  dp.region_name,
  pc.order_amount,
  pc.order_count,
  cast(pc.pay_date as VARCHAR)
FROM
  tmp_province_index_source pc
  JOIN dim_province FOR SYSTEM_TIME AS OF pc.proctime AS dp
  ON dp.province_id = pc.province_id;
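
Once the job is running, the result can be checked directly on the MySQL side, for example:

SELECT dt, province_name, order_count, order_amount
FROM ads.ads_province_index
ORDER BY dt DESC, order_amount DESC;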

-- ---------------------------------

-- APP real-time log collection
DROP TABLE IF EXISTS hxd_log_str;
CREATE TABLE hxd_log_str (
  -- STRING is used here because in Flink SQL a bare VARCHAR defaults to VARCHAR(1)
  sdate STRING,
  cdate STRING,
  ky STRING,
  id STRING,
  ch STRING,
  av STRING,
  sv STRING,
  ui STRING,
  mc STRING,
  im STRING,
  ut STRING,
  qq STRING,
  mf STRING,
  md STRING,
  sd STRING,
  ip STRING,
  op STRING,
  tn STRING,
  cn STRING,
  ov STRING,
  lg STRING,
  sr STRING,
  tz STRING,
  jb STRING,
  si STRING,
  ts STRING,
  pi STRING,
  rf STRING,
  et STRING,
  tags STRING,
  ei STRING,
  ar STRING,
  kv STRING,
  du STRING,
  er STRING,
  ea STRING,
  ext1 STRING,
  ext2 STRING,
  ext3 STRING,
  ext4 STRING,
  ext5 STRING,
  ext6 STRING,
  ext7 STRING,
  ext8 STRING,
  ext9 STRING,
  ext10 STRING,
  ext11 STRING,
  ext12 STRING,
  ext13 STRING,
  ext14 STRING,
  ext15 STRING,
  fm STRING,
  di STRING,
  uid STRING,
  ec STRING
) WITH (
  'connector' = 'kafka',
  'topic' = 'hb_log',
  'properties.bootstrap.servers' = 'm1:9092,m2:9092,s1:9092',
  'properties.group.id' = 'hb_log_group',
  'scan.startup.mode' = 'earliest-offset',
  'format' = 'json'
);

-- APP device activation table
DROP TABLE IF EXISTS hbs_device_activate;
CREATE TABLE hbs_device_activate (
  di varchar(255) COMMENT 'device id',
  act_date date COMMENT 'activation date',
  PRIMARY KEY (`di`)
);

-- APP user registration table
DROP TABLE IF EXISTS hbs_user_register;
CREATE TABLE hbs_user_register (
  uid varchar(255) COMMENT 'user id',
  ch varchar(255) COMMENT 'registration channel',
  reg_date date COMMENT 'registration date',
  PRIMARY KEY (`uid`)
);

-- -------------------------
--   APP device activation table
--   MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_hbs_device_activate`;
CREATE TABLE `base_hbs_device_activate` (
  `di` STRING,
  `act_date` STRING,
  PRIMARY KEY (di) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/test',
  'table-name' = 'hbs_device_activate', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);
-- A device counts as activated on the first day its id appears in the log
-- and is not already present in the target table.
INSERT INTO base_hbs_device_activate
SELECT aa.di, min(cdate) AS act_date
FROM (
  SELECT a.di, a.cdate
  FROM hxd_log_str a
  LEFT JOIN base_hbs_device_activate b ON a.di = b.di
  WHERE b.di IS NULL
) aa
GROUP BY aa.di;

-- -------------------------
--   APP user registration table
--   MySQL Sink
-- -------------------------
DROP TABLE IF EXISTS `base_hbs_user_register`;
CREATE TABLE `base_hbs_user_register` (
  `uid` STRING,
  `ch` STRING,
  `reg_date` STRING,
  PRIMARY KEY (uid) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://dbmaster:3306/test',
  'table-name' = 'hbs_user_register', -- target table in MySQL
  'driver' = 'com.mysql.jdbc.Driver',
  'username' = 'root',
  'password' = 'root',
  'sink.buffer-flush.interval' = '1s'
);
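
The script stops short of loading this table; a sketch mirroring the device-activation pattern above might look like the following (assumptions: the uid, ch, and cdate fields of hxd_log_str carry the user id, registration channel, and event date):

INSERT INTO base_hbs_user_register
SELECT aa.uid, aa.ch, min(aa.cdate) AS reg_date
FROM (
  SELECT a.uid, a.ch, a.cdate
  FROM hxd_log_str a
  LEFT JOIN base_hbs_user_register b ON a.uid = b.uid
  WHERE b.uid IS NULL -- only users not already in the target table
) aa
GROUP BY aa.uid, aa.ch;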
 

Note: this article draws on 西贝's article 实时数仓|基于Flink1.11的SQL构建实时数仓探索实践 (Real-Time Data Warehouse: Exploring and Practicing Building a Real-Time Data Warehouse with Flink 1.11 SQL).

Special thanks to the author: 西贝.

This article is still being polished; the above covers only the early part of the content.
