Using Binlog and Kafka to Sync MySQL Data to Elasticsearch in Real Time (Part 3): Producing Binlog Messages to Kafka

Contents

Preface

- Project modules

BinlogMiddleware

1. The binlog middleware, responsible for parsing the binlog and publishing the changed rows to a Kafka queue as JSON.

KafkaMiddleware

2. The Kafka middleware, responsible for consuming the Messages in the Kafka queue and writing the data into Elasticsearch.

- Infrastructure services

(1) MySQL

(2) Kafka (holds the MySQL change messages in a Kafka queue)

(3) Elasticsearch

- Project source code

Overview:

The BinlogMiddleware service is responsible for listening to the binlog and publishing the change events to the Kafka queue (i.e., it acts as the Kafka producer).

This example listens to the user and role tables of the teemoliu database. For simplicity, both tables have a deliberately minimal schema with only two columns: id and name.

The messages the middleware writes to the Kafka queue look like this:

{"event":"teemoliu.user.update","value":[1,"TeemoLiu"]}

{"event":"teemoliu.role.insert","value":[1,"管理员"]}

The project structure is as follows:

(image: project structure)

1. Create a Spring Boot project.


2. Add the Maven dependencies.

<dependency>
    <groupId>com.github.shyiko</groupId>
    <artifactId>mysql-binlog-connector-java</artifactId>
    <version>0.16.1</version>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.49</version>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>1.1.1</version>
</dependency>

3. The configuration file (application.properties) is as follows:

# Disable the embedded web server port
spring.main.web-environment=false

# Binlog configuration
server.id=1
binlog.host=localhost
binlog.port=3306
binlog.user=root
binlog.password=root
# Tables to listen to
binlog.database.table=teemoliu.user,teemoliu.role

# Kafka
spring.kafka.bootstrap-servers=localhost:9092
kafka.topic=binlog
kafka.partNum=3
kafka.repeatNum=1
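Two prerequisites on the MySQL side are easy to overlook: the server must write its binary log in ROW format (otherwise the client only receives statement events rather than row data), and the server.id used above must differ from the MySQL server's own server-id, because BinaryLogClient registers itself as a replica. A minimal my.cnf fragment (the value 223344 is just an example) might look like this:

[mysqld]
# Unique id of the MySQL server itself (must not clash with the server.id=1 used by the middleware)
server-id=223344
# Enable the binary log
log_bin=mysql-bin
# Row-based logging is required so the middleware receives full row images
binlog_format=ROW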

4. Create the binlog data transfer object.

public class BinlogDto {

    private String event;
    private Object value;

    public BinlogDto(String event, Object value) {
        this.event = event;
        this.value = value;
    }

    public BinlogDto() {
    }

    public String getEvent() {
        return event;
    }

    public void setEvent(String event) {
        this.event = event;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }
}
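To see how this DTO turns into the message format shown earlier, here is a small, hypothetical test class (BinlogDtoDemo is not part of the project) that serializes a BinlogDto with fastjson:

import com.alibaba.fastjson.JSON;

public class BinlogDtoDemo {
    public static void main(String[] args) {
        // One row of the user table: id and name
        Object[] row = {1, "TeemoLiu"};
        BinlogDto dto = new BinlogDto("teemoliu.user.update", row);
        // Prints: {"event":"teemoliu.user.update","value":[1,"TeemoLiu"]}
        System.out.println(JSON.toJSONString(dto));
    }
}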

5. Create the Kafka message data transfer object.

import java.util.Date;

public class Message {

    private Long id;
    private String msg;
    private Date sendTime;

    public Message(Long id, String msg, Date sendTime) {
        this.id = id;
        this.msg = msg;
        this.sendTime = sendTime;
    }

    public Message() {
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Date getSendTime() {
        return sendTime;
    }

    public void setSendTime(Date sendTime) {
        this.sendTime = sendTime;
    }
}

6. The binlog listener: BinlogClientRunner.

import com.alibaba.fastjson.JSON;
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.event.DeleteRowsEventData;
import com.github.shyiko.mysql.binlog.event.EventData;
import com.github.shyiko.mysql.binlog.event.TableMapEventData;
import com.github.shyiko.mysql.binlog.event.UpdateRowsEventData;
import com.github.shyiko.mysql.binlog.event.WriteRowsEventData;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
public class BinlogClientRunner implements CommandLineRunner {

    @Value("${binlog.host}")
    private String host;

    @Value("${binlog.port}")
    private int port;

    @Value("${binlog.user}")
    private String user;

    @Value("${binlog.password}")
    private String password;

    // binlog server_id
    @Value("${server.id}")
    private long serverId;

    // Kafka topic
    @Value("${kafka.topic}")
    private String topic;

    // Number of Kafka partitions
    @Value("${kafka.partNum}")
    private int partNum;

    // Kafka replication factor
    @Value("${kafka.repeatNum}")
    private short repeatNum;

    // Kafka address
    @Value("${spring.kafka.bootstrap-servers}")
    private String kafkaHost;

    // Tables to listen to
    @Value("${binlog.database.table}")
    private String database_table;

    @Autowired
    KafkaSender kafkaSender;

    @Async
    @Override
    public void run(String... args) throws Exception {
        // Create the topic
        kafkaSender.createTopic(kafkaHost, topic, partNum, repeatNum);
        // The "database.table" names to listen to
        List<String> databaseList = Arrays.asList(database_table.split(","));
        // Maps a binlog table id to its "database.table" name
        Map<Long, String> tableMap = new HashMap<>();
        // Create the binlog client
        BinaryLogClient client = new BinaryLogClient(host, port, user, password);
        client.setServerId(serverId);
        client.registerEventListener(event -> {
            // binlog event
            EventData data = event.getData();
            if (data != null) {
                if (data instanceof TableMapEventData) {
                    TableMapEventData tableMapEventData = (TableMapEventData) data;
                    tableMap.put(tableMapEventData.getTableId(),
                            tableMapEventData.getDatabase() + "." + tableMapEventData.getTable());
                }
                // update rows
                if (data instanceof UpdateRowsEventData) {
                    UpdateRowsEventData updateRowsEventData = (UpdateRowsEventData) data;
                    String tableName = tableMap.get(updateRowsEventData.getTableId());
                    if (tableName != null && databaseList.contains(tableName)) {
                        String eventKey = tableName + ".update";
                        for (Map.Entry<Serializable[], Serializable[]> row : updateRowsEventData.getRows()) {
                            String msg = JSON.toJSONString(new BinlogDto(eventKey, row.getValue()));
                            kafkaSender.send(topic, msg);
                        }
                    }
                }
                // insert rows
                else if (data instanceof WriteRowsEventData) {
                    WriteRowsEventData writeRowsEventData = (WriteRowsEventData) data;
                    String tableName = tableMap.get(writeRowsEventData.getTableId());
                    if (tableName != null && databaseList.contains(tableName)) {
                        String eventKey = tableName + ".insert";
                        for (Serializable[] row : writeRowsEventData.getRows()) {
                            String msg = JSON.toJSONString(new BinlogDto(eventKey, row));
                            kafkaSender.send(topic, msg);
                        }
                    }
                }
                // delete rows
                else if (data instanceof DeleteRowsEventData) {
                    DeleteRowsEventData deleteRowsEventData = (DeleteRowsEventData) data;
                    String tableName = tableMap.get(deleteRowsEventData.getTableId());
                    if (tableName != null && databaseList.contains(tableName)) {
                        String eventKey = tableName + ".delete";
                        for (Serializable[] row : deleteRowsEventData.getRows()) {
                            String msg = JSON.toJSONString(new BinlogDto(eventKey, row));
                            kafkaSender.send(topic, msg);
                        }
                    }
                }
            }
        });
        client.connect();
    }
}
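BinlogClientRunner autowires a KafkaSender bean that lives in a separate class and is not shown in this part. As a rough sketch of what it needs to provide, assuming Spring's KafkaTemplate for producing and the kafka-clients AdminClient for topic creation (everything beyond the createTopic and send signatures is an assumption, not the original implementation):

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import java.util.Collections;
import java.util.Date;
import java.util.Properties;

@Component
public class KafkaSender {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Create the topic with the configured partition count and replication factor.
    // If the topic already exists, the error is only reported on the (ignored) result future.
    public void createTopic(String kafkaHost, String topic, int partNum, short repeatNum) {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaHost);
        try (AdminClient adminClient = AdminClient.create(props)) {
            adminClient.createTopics(Collections.singletonList(new NewTopic(topic, partNum, repeatNum)));
        }
    }

    // Wrap the binlog JSON in a Message envelope and publish it to the topic
    public void send(String topic, String msg) {
        Message message = new Message(System.currentTimeMillis(), msg, new Date());
        kafkaTemplate.send(topic, JSON.toJSONString(message));
    }
}

In this sketch, send wraps the binlog JSON in the Message DTO from step 5, matching the consumer side described in the project overview; if your consumer expects the raw BinlogDto JSON instead, pass msg through unchanged.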
