flink-cdc 实现MySQL变更捕获

着重点在本地调试flink-cdc踩坑

工具选型

  1. MySQL 5.7.31
  2. flink 1.13.3 , flink-connector-mysql-cdc 2.2-SNAPSHOT
  3. Flink-CDC文档

选择 flink-cdc 的原因是简化 CDC 过程中依赖的工具链:flink-cdc 内嵌复用了 debezium 的变更捕获能力,无需单独部署 Kafka Connect 即可直连 Flink;此外还可借助 Flink 平台适配的各种 source/sink 以及 SQL Client,轻松实现数据源同步。

参考过程

本地调试也需要 flink 的依赖。注意避免版本冲突:此处实际使用的是 1.13.6(与前文选型提到的 1.13.3 不同,以 pom 中声明的版本为准),可从国内镜像仓库下载:https://mirrors.huaweicloud.com/apache/flink/

MySQL 配置

[mysqld]
# Unique server id — required for binlog replication clients such as Debezium.
server-id         = 1
# Enable the binary log; the CDC connector reads change events from it.
log_bin           = mysql-bin
# ROW format is mandatory for CDC: it records full row images, not SQL statements.
binlog_format     = ROW
# FULL row image so both "before" and "after" values appear in update events.
binlog_row_image  = FULL
# Keep binlogs for 10 days; a resumed job older than this cannot find its offset.
expire_logs_days  = 10

日志配置 log4j2.properties

本地调试flink非常重要的点,很多错误不加日志配置根本看不到

# Re-read this config every 30 seconds so log levels can be tuned at runtime.
monitorInterval=30
# This affects logging for both user code and Flink
rootLogger.level=INFO
logger.aaron.level=INFO
logger.aaron.name=org.apache.flink
# Route all root-level output to the rolling-file appender defined below.
rootLogger.appenderRef.file.ref=MainAppender
# Per-subsystem loggers: akka / kafka / hadoop / zookeeper kept at INFO.
logger.akka.name=akka
logger.akka.level=INFO
logger.kafka.name=org.apache.kafka
logger.kafka.level=INFO
logger.hadoop.name=org.apache.hadoop
logger.hadoop.level=INFO
logger.zookeeper.name=org.apache.zookeeper
logger.zookeeper.level=INFO
logger.shaded_zookeeper.name=org.apache.flink.shaded.zookeeper3
logger.shaded_zookeeper.level=INFO
# Log all infos in the given file
appender.main.name=MainAppender
appender.main.type=RollingFile
appender.main.append=true
# NOTE(review): relative path — the file lands in the JVM working directory.
appender.main.fileName=console.log
appender.main.filePattern=console.log.%i
appender.main.layout.type=PatternLayout
appender.main.layout.pattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
# Roll over at 100MB or on every process start.
appender.main.policies.type=Policies
appender.main.policies.size.type=SizeBasedTriggeringPolicy
appender.main.policies.size.size=100MB
appender.main.policies.startup.type=OnStartupTriggeringPolicy
appender.main.strategy.type=DefaultRolloverStrategy
# Keep at most MAX_LOG_FILE_NUMBER rolled files (default 10).
appender.main.strategy.max=${env:MAX_LOG_FILE_NUMBER:-10}
# Silence the noisy "unexpected exception in pipeline" netty messages.
logger.netty.name=org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline
logger.netty.level=OFF

示例代码

import java.util.Properties;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;


/**
 * Minimal Flink CDC demo: takes an initial snapshot of a MySQL table, then
 * streams binlog change events, printing each event as a Debezium-style JSON
 * string. Checkpointing persists binlog offsets so the job resumes instead of
 * re-snapshotting.
 */
public class Application {

    public static void main(String[] args) throws Exception {
        mysql_cdc();
    }

    /**
     * Builds and runs the snapshot-plus-binlog pipeline against
     * {@code test_database.test_table}.
     *
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void mysql_cdc() throws Exception {
        Properties properties = new Properties();
        // Debezium: emit DECIMAL/NUMERIC columns as doubles instead of encoded bytes.
        properties.setProperty("decimal.handling.mode", "double");
        // Debezium passes properties prefixed with "database." through to the JDBC URL.
        properties.setProperty("database.serverTimezone", "GMT+8");

        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("192.168.31.233")
                .port(3306)
                .databaseList("test_database") // set captured database
                .tableList("test_database.test_table") // set captured table
                .username("root")
                .password("123456")
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .debeziumProperties(properties)
                .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Enable checkpointing (every 3s) so binlog offsets survive restarts.
        env.enableCheckpointing(3000);
        env.getCheckpointConfig().setCheckpointStorage(
                new FileSystemCheckpointStorage("file:///flink-ck/checkpoints"));

        env
                .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source")
                // set 4 parallel source tasks
                .setParallelism(4)
                .print().setParallelism(1); // use parallelism 1 for sink to keep message ordering

        env.execute("Print MySQL Snapshot + Binlog");
    }

}

依赖参考

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>flink-cdc</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.13.6</flink.version>
        <java.version>1.8</java.version>
        <scala.binary.version>2.11</scala.binary.version>
        <maven.compiler.source>${java.version}</maven.compiler.source>
        <maven.compiler.target>${java.version}</maven.compiler.target>
    </properties>

    <dependencies>
        <!--基础依赖 flink-streaming-java flink-clients 有这两个包就能本地跑 -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- NOTE(review): the table-api bridge is unused by the sample class shown;
             presumably kept for SQL-client style jobs — confirm before removing. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <!-- The CDC connector (embeds Debezium) and the MySQL JDBC driver. -->
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>2.2.1</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.22</version>
        </dependency>

		<!-- 日志框架 -->
		<dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <version>2.14.1</version>
        </dependency>
        <!-- SLF4J binding so Flink's slf4j logging is routed to log4j2. -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>2.14.1</version>
        </dependency>

    </dependencies>

    <build>
        <plugins>

            <!-- Java Compiler -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <source>${java.version}</source>
                    <target>${java.version}</target>
                </configuration>
            </plugin>

            <!-- We use the maven-shade plugin to create a fat jar that contains all necessary dependencies. -->
            <!-- Change the value of <mainClass>...</mainClass> if your program entry point changes. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.0.0</version>
                <executions>
                    <!-- Run shade goal on package phase -->
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <createDependencyReducedPom>false</createDependencyReducedPom>
                            <artifactSet>
                                <excludes>
                                    <exclude>org.apache.flink:force-shading</exclude>
                                    <exclude>com.google.code.findbugs:jsr305</exclude>
                                    <exclude>org.slf4j:*</exclude>
                                    <exclude>log4j:*</exclude>
                                </excludes>
                            </artifactSet>
                            <filters>
                                <filter>
                                    <!-- Do not copy the signatures in the META-INF folder.
                                    Otherwise, this might cause SecurityExceptions when using the JAR. -->
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers>
                                <!-- NOTE(review): the sample Application class above has no
                                     package declaration; if that matches your project, this
                                     should be just "Application" — verify the actual package. -->
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <mainClass>com.example.cdc.Application</mainClass>
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>

    </build>
</project>

完了开跑

第一次是全量快照, 之后是binlog的offset拉取,Flink Checkpoint 持久化断点续传

{"before":null,"after":{"id":1,"name":"zhangsan"},"source":{"version":"1.5.4.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1640677288811,"snapshot":"false","db":"test_database","sequence":null,"table":"test_table","server_id":0,"gtid":null,"file":"","pos":0,"row":0,"thread":null,"query":null},"op":"r","ts_ms":1640677288815,"transaction":null}
{"before":null,"after":{"id":3,"name":"wangwu"},"source":{"version":"1.5.4.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1640677288816,"snapshot":"false","db":"test_database","sequence":null,"table":"test_table","server_id":0,"gtid":null,"file":"","pos":0,"row":0,"thread":null,"query":null},"op":"r","ts_ms":1640677288816,"transaction":null}
{"before":null,"after":{"id":2,"name":"lisi"},"source":{"version":"1.5.4.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1640677288816,"snapshot":"false","db":"test_database","sequence":null,"table":"test_table","server_id":0,"gtid":null,"file":"","pos":0,"row":0,"thread":null,"query":null},"op":"r","ts_ms":1640677288816,"transaction":null}
十二月 28, 2021 3:41:30 下午 com.github.shyiko.mysql.binlog.BinaryLogClient connect
信息: Connected to 192.168.31.233:3308 at mysql-bin.000003/2526 (sid:6257, cid:36)
{"before":null,"after":{"id":5,"name":"qianqi"},"source":{"version":"1.5.4.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1640677322000,"snapshot":"false","db":"test_database","sequence":null,"table":"test_table","server_id":1,"gtid":null,"file":"mysql-bin.000003","pos":2724,"row":0,"thread":null,"query":null},"op":"c","ts_ms":1640677323291,"transaction":null}
{"before":{"id":5,"name":"qianqi"},"after":{"id":5,"name":"钱七"},"source":{"version":"1.5.4.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1640677338000,"snapshot":"false","db":"test_database","sequence":null,"table":"test_table","server_id":1,"gtid":null,"file":"mysql-bin.000003","pos":3001,"row":0,"thread":null,"query":null},"op":"u","ts_ms":1640677338533,"transaction":null}

  • 3
    点赞
  • 6
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 2
    评论
Flink CDC(Change Data Capture)是基于 Apache Flink 的一种技术,用于捕获和处理数据源中的更改操作。它主要用于实时数据流处理和数据集成场景,可以从各种数据源(如数据库、消息队列等)中捕获数据更改,并将这些更改以流式方式传递给 Flink 流处理作业。 Flink CDC 提供了以下功能: 1. 数据源连接:Flink CDC 支持与各种数据源的连接,包括关系型数据库(如MySQL、PostgreSQL、Oracle等)、消息队列(如Kafka、RabbitMQ等)以及其他常见的数据存储和消息系统。 2. 数据更改捕获:Flink CDC 可以以低延迟的方式捕获数据源中的更改操作,包括插入、更新和删除。它通过监视源系统的日志或使用特定的协议来实现数据更改的捕获。 3. 数据格式化和转换:Flink CDC 可以将捕获到的数据更改进行格式化和转换,使其适应 Flink 的数据处理模型。这样,你可以在 Flink 中对数据进行实时处理、转换和分析。 4. 并发处理和容错性:Flink CDC 基于 Apache Flink,可以享受到 Flink 提供的并发处理和容错性能。它可以实现高吞吐量、低延迟的数据处理,并具备故障恢复和容错能力。 通过使用 Flink CDC,你可以构建实时的数据流处理应用程序,从而实现实时数据集成、ETL、实时分析和报告等需求。它为你提供了一种方便而灵活的方式来处理不同数据源的更改操作,并将其无缝地与 Flink 的流处理能力结合起来。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

三千烦恼丝xzh

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值