本文目标
源库 -> CDC采集 -> Kafka(debezium-json格式) -> Flink -> PostgreSQL
Flink消费Kafka中客户、产品、订单(ID)三张表的数据合并为一张订单(NAME)表。
前置环境
- 源库内创建三张表(注:以下DDL为Oracle语法,而非MySQL)
-- Customer dimension table in the source database (Oracle DDL).
CREATE TABLE "YINYX"."T_CUST" (
    "CUST_ID"   NUMBER(9,0) VISIBLE NOT NULL,
    "CUST_NAME" VARCHAR2(32 BYTE) VISIBLE
);
ALTER TABLE "YINYX"."T_CUST"
    ADD CONSTRAINT "SYS_C007568" PRIMARY KEY ("CUST_ID");
-- Product dimension table in the source database (Oracle DDL).
CREATE TABLE "YINYX"."T_PROD" (
    "PROD_ID"   NUMBER(9,0) VISIBLE NOT NULL,
    "PROD_NAME" VARCHAR2(32 BYTE) VISIBLE
);
ALTER TABLE "YINYX"."T_PROD"
    ADD CONSTRAINT "SYS_C007569" PRIMARY KEY ("PROD_ID");
-- Order fact table in the source database (Oracle DDL).
-- CUST_ID / PROD_ID reference T_CUST / T_PROD (no FK declared here).
CREATE TABLE "YINYX"."T_ORDER" (
    "ORDER_ID" NUMBER(9,0) VISIBLE NOT NULL,
    "CUST_ID"  NUMBER(9,0) VISIBLE,
    "PROD_ID"  NUMBER(9,0) VISIBLE,
    "AMOUNT"   NUMBER(9,0) VISIBLE
);
ALTER TABLE "YINYX"."T_ORDER"
    ADD CONSTRAINT "SYS_C007570" PRIMARY KEY ("ORDER_ID");
- PostgreSQL内创建一张目标表(注:以下DDL为PostgreSQL语法)
-- Denormalized order output table in PostgreSQL: order joined with
-- customer name and product name.
CREATE TABLE "public"."t_order_out" (
    "order_id"  int8 NOT NULL,
    "cust_name" varchar(50) COLLATE "pg_catalog"."default",
    "prod_name" varchar(50) COLLATE "pg_catalog"."default",
    "amount"    int8
);
ALTER TABLE "public"."t_order_out"
    ADD CONSTRAINT "t_order_out_pkey" PRIMARY KEY ("order_id");
Flink内创建任务
-- Flink mapping of the T_CUST changelog topic in Kafka (debezium-json).
-- NOTE(review): some Flink versions reject a PRIMARY KEY declaration on a
-- 'kafka' connector table with a changelog format — confirm against the
-- Flink version in use.
CREATE TABLE tcust_kafka (
    CUST_ID   BIGINT NOT NULL,
    CUST_NAME STRING NULL,
    PRIMARY KEY (CUST_ID) NOT ENFORCED
) WITH (
    'connector' = 'kafka',
    'topic' = 'yyx.YINYX.T_CUST',
    'properties.bootstrap.servers' = '127.0.0.1:9092',
    'properties.group.id' = 'gyyx',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'debezium-json',
    'debezium-json.schema-include' = 'false'
);
-- Flink mapping of the T_PROD changelog topic in Kafka (debezium-json).
CREATE TABLE tprod_kafka (
    PROD_ID   BIGINT NOT NULL,
    PROD_NAME STRING NULL,
    PRIMARY KEY (PROD_ID) NOT ENFORCED
) WITH (
    'connector' = 'kafka',
    'topic' = 'yyx.YINYX.T_PROD',
    'properties.bootstrap.servers' = '127.0.0.1:9092',
    'properties.group.id' = 'gyyx',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'debezium-json',
    'debezium-json.schema-include' = 'false'
);
-- Flink mapping of the T_ORDER changelog topic in Kafka (debezium-json).
CREATE TABLE torder_kafka (
    ORDER_ID BIGINT NOT NULL,
    CUST_ID  BIGINT NULL,
    PROD_ID  BIGINT NULL,
    AMOUNT   BIGINT NULL,
    PRIMARY KEY (ORDER_ID) NOT ENFORCED
) WITH (
    'connector' = 'kafka',
    'topic' = 'yyx.YINYX.T_ORDER',
    'properties.bootstrap.servers' = '127.0.0.1:9092',
    'properties.group.id' = 'gyyx',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'debezium-json',
    'debezium-json.schema-include' = 'false'
);
-- JDBC sink for syncing T_CUST into PostgreSQL — used only as a pipeline
-- sanity check, not part of the final transform.
CREATE TABLE tcust_pg (
    cust_id   BIGINT NOT NULL,
    cust_name STRING NULL,
    PRIMARY KEY (cust_id) NOT ENFORCED
) WITH (
    'connector' = 'jdbc',
    'url' = 'jdbc:postgresql://127.0.0.1:6432/test',
    'driver' = 'org.postgresql.Driver',
    'username' = 'test',
    'password' = 'test',
    'table-name' = 't_cust'
);
-- Smoke test: verify the Kafka -> Flink -> PostgreSQL channel works
-- before submitting the real join job.
INSERT INTO tcust_pg (cust_id, cust_name)
SELECT CUST_ID, CUST_NAME
FROM tcust_kafka;
-- JDBC sink mapping of the PostgreSQL target table t_order_out — the
-- final output of this example.
CREATE TABLE torderout_pg (
    order_id  BIGINT NOT NULL,
    cust_name STRING NULL,
    prod_name STRING NULL,
    amount    BIGINT NULL,
    PRIMARY KEY (order_id) NOT ENFORCED
) WITH (
    'connector' = 'jdbc',
    'url' = 'jdbc:postgresql://127.0.0.1:6432/test',
    'driver' = 'org.postgresql.Driver',
    'username' = 'test',
    'password' = 'test',
    'table-name' = 't_order_out'
);
-- Submit the transform job to Flink: join the three Kafka changelog
-- sources into the single PostgreSQL target table.
-- FIX: the original SELECT projected 6 expressions (trailing o.CUST_ID,
-- o.PROD_ID) into a 4-column INSERT list, which fails SQL validation
-- with a column-count mismatch; the two extra projections are removed.
INSERT INTO torderout_pg (order_id, cust_name, prod_name, amount)
SELECT
    o.ORDER_ID,
    c.CUST_NAME,
    p.PROD_NAME,
    o.AMOUNT
FROM torder_kafka AS o
INNER JOIN tcust_kafka AS c ON o.CUST_ID = c.CUST_ID
INNER JOIN tprod_kafka AS p ON o.PROD_ID = p.PROD_ID;