前言
需求如下:通过第三方数据传递到我们平台的数据,需要实时发送邮件处理。目前的方案是使用 Spring 容器中的定时任务 @Scheduled,每隔 15min 轮询一次数据库表。此方案对 MySQL 压力大,且无法实时处理数据,故引入 canal 组件。
canal简介
canal 的工作原理是把自己伪装成 MySQL slave,模拟 MySQL slave 的交互协议向 MySQL master 发送 dump 协议请求;MySQL master 收到 canal 发送过来的 dump 请求后,开始推送 binary log 给 canal;canal 解析 binary log 后再做后续处理。注意 canal 的数据同步不是全量的,而是基于 binary log 的增量订阅和消费。
代码实现
@Configuration
public class CanalListener implements ApplicationRunner {
@Value("${canal.ip}")
private String ip;//Canal IP
@Value("${canal.port}")
private Integer port;//Canal Port
@Value("${canal.destination}")
private String destination;// 目标配置 即放目标instance.properties的文件夹名字
@Value("${canal.username}")
private String username;//用户名
@Value("${canal.password}")
private String password;//密码
public void run(ApplicationArguments args) throws Exception {
//开始干活
dowork();
}
//官方示例代码
public void dowork(){
// 创建链接
CanalConnector connector = CanalConnectors.newSingleConnector(new InetSocketAddress(ip,
port), destination, username, password);
int batchSize = 1000;
try {
connector.connect();
connector.subscribe("test.role");
connector.rollback();
while (true) {
Message message = connector.getWithoutAck(batchSize); // 获取指定数量的数据
long batchId = message.getId();
int size = message.getEntries().size();
if (batchId == -1 || size == 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
System.out.println(e);
}
} else {
run(message.getEntries());
}
connector.ack(batchId); // 提交确认接收成功
// connector.rollback(batchId); // 处理失败, 回滚数据 ,下次还能接收到这条数据
}
} finally {
connector.disconnect();
}
}
public void run(List<CanalEntry.Entry> entrys) {
for (CanalEntry.Entry entry : entrys) {
if (entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONBEGIN || entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONEND) {
continue;
}
CanalEntry.RowChange rowChage = null;
try {
rowChage = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
} catch (Exception e) {
throw new RuntimeException("ERROR ## parser of eromanga-event has an error , data:" + entry.toString(),
e);
}
CanalEntry.EventType eventType = rowChage.getEventType();
System.out.println(String.format("================> binlog[%s:%s] , name[%s,%s] , eventType : %s",
entry.getHeader().getLogfileName(), entry.getHeader().getLogfileOffset(),
entry.getHeader().getSchemaName(), entry.getHeader().getTableName(),
eventType));
for (CanalEntry.RowData rowData : rowChage.getRowDatasList()) {
if (eventType == CanalEntry.EventType.DELETE) {
printColumn(rowData.getBeforeColumnsList());
} else if (eventType == CanalEntry.EventType.INSERT) {
printColumn(rowData.getAfterColumnsList());
} else {
System.out.println("-------> before");
printColumn(rowData.getBeforeColumnsList());
System.out.println("-------> after");
printColumn(rowData.getAfterColumnsList());
}
}
}
}
private void printColumn(List<CanalEntry.Column> columns) {
for (CanalEntry.Column column : columns) {
System.out.println(column.getName() + " : " + column.getValue() + " update=" + column.getUpdated());
}
}
}