MySQL CDC — Flink 之 MySQL 数据 CDC

知识点:

https://github.com/ververica/flink-cdc-connectors //官网地址

1、依赖

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-java_2.11</artifactId>
    <version>${flink.version}</version>
</dependency>

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-clients_2.11</artifactId>
    <version>${flink.version}</version>
</dependency>

<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.16</version>
</dependency>

<dependency>
    <groupId>com.alibaba.ververica</groupId>
    <artifactId>flink-connector-mysql-cdc</artifactId>
    <version>1.0.0</version>
</dependency>

<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-api</artifactId>
    <version>1.7.25</version>
</dependency>

<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-log4j12</artifactId>
    <version>1.7.25</version>
</dependency>

2、处理类

importorg.apache.flink.configuration.Configuration;importorg.apache.flink.streaming.api.environment.StreamExecutionEnvironment;importorg.apache.flink.streaming.api.functions.source.SourceFunction;importcom.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;importcom.alibaba.ververica.cdc.connectors.mysql.MySQLSource;/*** @program: Flink1.11

* @description:

*@author: yang

* @create: 2021-01-11 17:41*/

public classMySqlBinlogSourceExample {public static void main(String[] args) throwsException {

SourceFunction sourceFunction = MySQLSource.builder()

.hostname("localhost")

.port(3306)

.databaseList("test")//monitor all tables under inventory database

.username("root")

.password("root")

.deserializer(newStringDebeziumDeserializationSchema())//converts SourceRecord to String

.build();

StreamExecutionEnvironment env= StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(newConfiguration());

env.addSource(sourceFunction).print().setParallelism(1);//use parallelism 1 for sink to keep message ordering

env.execute("test");

}

}

3、binlog结果

修改:before and after

SourceRecord{

sourcePartition={server=mysql-binlog-source},

sourceOffset={ts_sec=1610362335, file=mysql-bin.000004, pos=233445691, row=1, server_id=1, event=2}

}

ConnectRecord

{topic='mysql-binlog-source.test.weblog', kafkaPartition=null, key=Struct{id=5}, keySchema=Schema{mysql_binlog_source.test.weblog.Key:STRUCT}, value=Struct{before=Struct{id=5,url=5,method=5,ip=5,args=5,create_time=1610390670000},after=Struct{id=5,url=5555,method=5555,ip=5555,args=5555,create_time=1610390670000},source=Struct{version=1.2.0.Final,connector=mysql,name=mysql-binlog-source,ts_ms=1610362335000,db=test,table=weblog,server_id=1,file=mysql-bin.000004,pos=233445826,row=0,thread=944986},op=u,ts_ms=1610362335615}, valueSchema=Schema{mysql_binlog_source.test.weblog.Envelope:STRUCT}, timestamp=null, headers=ConnectHeaders(headers=)

}

增加:只有after

SourceRecord{sourcePartition={server=mysql-binlog-source}, sourceOffset={file=mysql-bin.000004, pos=233455303}}

ConnectRecord

{topic='mysql-binlog-source.test.weblog', kafkaPartition=null, key=Struct{id=7}, keySchema=Schema{mysql_binlog_source.test.weblog.Key:STRUCT}, value=Struct{after=Struct{id=7,url=7,method=7,ip=7,args=7,create_time=1610391478000},source=Struct{version=1.2.0.Final,connector=mysql,name=mysql-binlog-source,ts_ms=0,snapshot=last,db=test,table=weblog,server_id=0,file=mysql-bin.000004,pos=233455303,row=0},op=c,ts_ms=1610362692061}, valueSchema=Schema{mysql_binlog_source.test.weblog.Envelope:STRUCT}, timestamp=null, headers=ConnectHeaders(headers=)}

删除:只有before

SourceRecord{sourcePartition={server=mysql-binlog-source}, sourceOffset={ts_sec=1610362743, file=mysql-bin.000004, pos=233456891, row=1, server_id=1, event=2}} ConnectRecord{topic='mysql-binlog-source.test.weblog', kafkaPartition=null, key=Struct{id=1}, keySchema=Schema{mysql_binlog_source.test.weblog.Key:STRUCT}, value=Struct{before=Struct{id=1,url=1,method=1,ip=1,args=1,create_time=1603115590000},source=Struct{version=1.2.0.Final,connector=mysql,name=mysql-binlog-source,ts_ms=1610362743000,db=test,table=weblog,server_id=1,file=mysql-bin.000004,pos=233457026,row=0,thread=944986},op=d,ts_ms=1610362744527}, valueSchema=Schema{mysql_binlog_source.test.weblog.Envelope:STRUCT}, timestamp=null, headers=ConnectHeaders(headers=)}

4、如果需要将数据进行etl,解析数据,然后自定义实现sink

#####################################测试代码不能用,由于开源不完善,所以无法完成....##################################################

5、测试代码

importcom.alibaba.fastjson.JSON;importcom.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;importcom.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;importorg.apache.flink.api.common.functions.MapFunction;importorg.apache.flink.api.common.typeinfo.BasicTypeInfo;importorg.apache.flink.api.common.typeinfo.TypeInformation;importorg.apache.flink.configuration.Configuration;importorg.apache.flink.streaming.api.environment.StreamExecutionEnvironment;importorg.apache.flink.streaming.api.functions.source.SourceFunction;importcom.alibaba.ververica.cdc.connectors.mysql.MySQLSource;importorg.apache.flink.util.Collector;importorg.apache.kafka.connect.data.Struct;importorg.apache.kafka.connect.source.SourceRecord;importjava.lang.reflect.Field;/*** @program: Flink1.11

* @description:

*@author: yang

* @create: 2021-01-11 17:41*/

public classMySqlBinlogSourceExample {public static void main(String[] args) throwsException {

SourceFunction sourceFunction = MySQLSource.builder()

.hostname("123.207.27.238")

.port(3306)

.databaseList("test","spark_job").tableList("test.weblog","spark_test")

.username("root")

.password("hushuo")

.deserializer(newStringDebeziumDeserializationSchema())//.deserializer(new MyDebeziumDeserializationSchema())

.build();

StreamExecutionEnvironment env= StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(newConfiguration());

env.addSource(sourceFunction).print().setParallelism(1);//env.addSource(sourceFunction).map(new MyMapHandler()).print().setParallelism(1);

env.execute("test");

}

}class MyMapHandler implements MapFunction{public Object map(String s) throwsException {

SourceRecord sourceRecord= JSON.toJavaObject(JSON.parseObject(s),SourceRecord.class);

Struct struct=(Struct)sourceRecord.value();

System.out.println("struct:"+struct);returns;

}

}class MyDebeziumDeserializationSchema implementsDebeziumDeserializationSchema{public static final String CREATE = "c";public static final String DELETE = "d";public static final String UPDATE = "u";public void deserialize(SourceRecord sourceRecord, Collector collector) throwsException {

Struct value=(Struct)sourceRecord.value();

String op= value.getString("op");

Struct data= null;if(CREATE.equals(op)){//增加

data = this.createData(value);

}else if(DELETE.equals(op)){//删除

data = this.deleteData(value);

}else if(UPDATE.equals(op)){//修改

data = this.updateData(value);

}else{throw new RuntimeException("data is error......");

}

collector.collect(JSON.toJSONString(data));

}publicTypeInformation getProducedType() {returnBasicTypeInfo.STRING_TYPE_INFO;

}privateStruct updateData( Struct value){

System.out.println("修改");

Struct beforeData= (Struct)value.get("before");

System.out.println("修改之前数据before:"+beforeData.toString());

Struct afterData= (Struct)value.get("after");

System.out.println("修改之后数据afterData:"+afterData.toString());returnafterData;

}privateStruct deleteData( Struct value){

System.out.println("删除");

Struct beforeData= (Struct)value.get("before");

System.out.println("before:"+beforeData.toString());returnbeforeData;

}privateStruct createData( Struct value){

System.out.println("增加");

Struct afterData= (Struct)value.get("after");

System.out.println("afterData:"+afterData.toString());returnafterData;

}

}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值