A collection of commonly used Flink SQL demos

This article collects some representative Flink SQL examples that beginners can practice with at work; a follow-up with some Flink programs will be added when time permits.

1. Unbounded-stream DataGen job

drop table if exists StreamSourceTable;
drop table if exists StreamSinkTable;
CREATE TABLE StreamSourceTable (
  content STRING
) WITH (
  'connector' = 'datagen'
);
CREATE TABLE StreamSinkTable (
  origin STRING
) WITH (
  'connector' = 'print'
);
INSERT INTO StreamSinkTable SELECT content FROM StreamSourceTable;
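
The datagen source generates random rows indefinitely, which is what makes this job an unbounded stream. If you want a slower, more readable output, the source can be tuned through connector options; a minimal sketch (option names from the datagen connector, values chosen arbitrarily here):

CREATE TABLE StreamSourceTable (
  content STRING
) WITH (
  'connector' = 'datagen',
  'rows-per-second' = '5',         -- emit 5 rows per second instead of the default 10000
  'fields.content.length' = '10'   -- generate random strings of length 10
);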

2. Bounded-stream DataGen job

CREATE TABLE datagen (
  f_sequence INT
) WITH (
  'connector' = 'datagen',
  'rows-per-second' = '1',
  'fields.f_sequence.kind' = 'sequence',
  'fields.f_sequence.start' = '1',
  'fields.f_sequence.end' = '500'
);
CREATE TABLE print_table (f_sequence INT) WITH ('connector' = 'print');
INSERT INTO print_table select f_sequence from datagen;
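
Because f_sequence is declared as a sequence with a fixed start and end, the source stops after emitting 500 rows and the job finishes, which is what makes this stream bounded. If you need random values instead of a sequence but still want a bounded source, the datagen connector's 'number-of-rows' option can be used; a sketch with assumed values:

CREATE TABLE datagen_bounded (
  f_random INT
) WITH (
  'connector' = 'datagen',
  'rows-per-second' = '1',
  'number-of-rows' = '500'   -- the source ends after producing 500 rows
);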

3. MySQL -> MySQL CDC job

create table goods_source (
  goods_id int,
  goods_price decimal(8,2),
  goods_name varchar,
  goods_details varchar,
  PRIMARY KEY (`goods_id`) NOT ENFORCED
) WITH (
  'connector' = 'mysql-cdc',
  'hostname' = 'XXX.XXX.XXX.XXX',
  'port' = '3306',
  'database-name' = 'testcdc',
  'table-name' = 'cdc_source',
  'username' = '**********',
  'password' = '**********',
  'jdbc.properties.useSSL' = 'false'
);
create table goods_target (
  goods_id int,
  goods_price decimal(8,2),
  goods_name varchar,
  goods_details varchar,
  PRIMARY KEY (`goods_id`) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://XXX.XXX.XXX.XXX:3306/testcdc?useSSL=false&characterEncoding=utf-8',
  'table-name' = 'cdc_sink',
  'username' = '**********',
  'password' = '**********'
);
insert into
  goods_target
select
  *
from
  goods_source;
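
The mysql-cdc source reads the MySQL binlog, so the source instance must have binlog enabled in ROW format, and the JDBC sink writes into an existing table, so cdc_sink has to be created in MySQL beforehand. A hypothetical MySQL-side DDL matching the Flink schemas above (column lengths are assumptions):

CREATE TABLE testcdc.cdc_source (
  goods_id      INT PRIMARY KEY,
  goods_price   DECIMAL(8,2),
  goods_name    VARCHAR(255),
  goods_details VARCHAR(1024)
);
CREATE TABLE testcdc.cdc_sink LIKE testcdc.cdc_source;

Because goods_target declares a primary key, the JDBC sink runs in upsert mode, so inserts, updates and deletes captured from cdc_source are applied to cdc_sink rather than appended.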

4. MySQL -> Elasticsearch job

create table tb_data_source (
  id int,
  userName varchar,
  random double,
  PRIMARY KEY (`id`) NOT ENFORCED
) WITH (
  'connector' = 'mysql-cdc',
  'hostname' = 'XXX.XXX.XXX.XXX',
  'port' = '3306',
  'database-name' = 'testcdc',
  'table-name' = 'tb_data',
  'username' = '**********',
  'password' = '**********',
  'jdbc.properties.useSSL' = 'false'
);
create table tb_data_targets (
  id int,
  userName varchar,
  random double,
  PRIMARY KEY (`id`) NOT ENFORCED
) WITH (
  'connector' = 'elasticsearch-7',
  'hosts' = 'XXX.XXX.XXX.XXX:9200',
  'index' = 'order_index_sxy_1',
  'username' = '**********',
  'password' = '**********'
);
insert into
  tb_data_targets
select
  *
from
  tb_data_source;
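
Same pattern as job 3, but the sink is Elasticsearch 7. Since tb_data_targets declares a primary key, the connector works in upsert mode and uses id as the document _id, so changes in MySQL overwrite the corresponding ES documents. A hypothetical MySQL-side DDL for the source table (column lengths assumed):

CREATE TABLE testcdc.tb_data (
  id       INT PRIMARY KEY,
  userName VARCHAR(255),
  random   DOUBLE
);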

5. Kafka -> MySQL job

create table goods_source (
  goods_id int,
  goods_price decimal(8,2),
  goods_name varchar,
  goods_details varchar
) WITH (
  'connector' = 'kafka',
  'properties.bootstrap.servers' = 'XXX.XXX.XXX.XXX:9092',
  'topic' = 'test_kafka',
  'properties.group.id' = 'test-consumer-group-1',
  'properties.security.protocol' = 'SASL_PLAINTEXT',
  'properties.sasl.mechanism' = 'PLAIN',
  'properties.sasl.jaas.config' = 'org.apache.kafka.common.security.plain.PlainLoginModule required username="*****" password="*********";', -- replace with your Kafka authentication info
  'scan.startup.mode' =  'earliest-offset',
  'format' =  'json'
);
create table goods_target (
  goods_id int,
  goods_price decimal(8,2),
  goods_name varchar,
  goods_details varchar,
  PRIMARY KEY (`goods_id`) NOT ENFORCED
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://XXX.XXX.XXX.XXX:3306/cdc-sink?useSSL=false&characterEncoding=utf-8',
  'table-name' = 'my_goods_kafka',
  'username' = '**********', -- replace with your username
  'password' = '**********'  -- replace with your password
);
insert into
  goods_target
select
  *
from
  goods_source;
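
With 'format' = 'json', each message on test_kafka is expected to be a flat JSON object whose field names match the goods_source columns. A hypothetical example message (values made up for illustration):

{"goods_id": 1001, "goods_price": 49.99, "goods_name": "apple", "goods_details": "fresh fruit"}

As in job 3, the primary key on goods_target puts the JDBC sink into upsert mode, so messages with the same goods_id overwrite each other in my_goods_kafka.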

6. Kafka -> Doris job

Every minute, aggregate the prices of the records in Kafka (a one-minute tumbling event-time window per biz) and write the results to Doris.
Sample Kafka JSON record:

{"biz": "Business_A","order_id": "ORD001","price": 49.99,"detail": "产品A","timestamp": 1654321234000}
CREATE TABLE order_detail(
  `biz` VARCHAR,
  `order_id` VARCHAR,
  `price` DOUBLE,
  `detail` VARCHAR,
  `timestamp` BIGINT,
  `time_ltz` AS TO_TIMESTAMP_LTZ(`timestamp`, 3),
  WATERMARK FOR `time_ltz` AS `time_ltz` - INTERVAL '5' SECOND
) WITH (
  'connector' = 'kafka',
  'properties.bootstrap.servers' = 'XXX.XXX.XXX.XXX:9092',
  'topic' = 'test01',
  'properties.group.id' = 'test-consumer-group-1',
  'properties.security.protocol' = 'PLAINTEXT',
  'scan.startup.mode' = 'latest-offset',
  'format' = 'json'
);
CREATE TABLE order_stat(
  `biz` VARCHAR,
  `window_start` TIMESTAMP(3),
  `window_end` TIMESTAMP(3),
  `total_order_price` DOUBLE,
  `count` BIGINT,
  PRIMARY KEY (biz, window_start) NOT ENFORCED
) WITH (
  'connector' = 'doris',
  'fenodes' = '192.168.65.6:8030',
  'table.identifier' = 'trade.order_stat',
  'username' = '**********',
  'password' = '**********',
  'sink.label-prefix' = 'doris_label2'
);
INSERT INTO
  order_stat
SELECT
  biz,
  window_start,
  window_end,
  SUM(price) AS total_order_price,
  COUNT(*) AS `count`
FROM
  TABLE(
    TUMBLE(
      TABLE order_detail,
      DESCRIPTOR(`time_ltz`),
      INTERVAL '1' MINUTE
    )
  )
GROUP BY
  biz,
  window_start,
  window_end;
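
The Doris connector writes into an existing table, so trade.order_stat has to be created in Doris first. A possible Doris-side DDL, sketched under the assumption of a Unique Key model (so that retried writes for the same window key overwrite rather than duplicate) and a single-replica test cluster:

CREATE TABLE trade.order_stat (
  `biz`               VARCHAR(64),
  `window_start`      DATETIME,
  `window_end`        DATETIME,
  `total_order_price` DOUBLE,
  `count`             BIGINT
)
UNIQUE KEY (`biz`, `window_start`)
DISTRIBUTED BY HASH(`biz`) BUCKETS 1
PROPERTIES ("replication_num" = "1");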