<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.1.3</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.49</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_2.12</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-mysql-cdc</artifactId>
<!-- NOTE(review): flink-connector-mysql-cdc 2.0.x is built against Flink 1.13+;
     pairing it with the Flink 1.12.0 artifacts above may fail at runtime.
     Either upgrade Flink to 1.13.x or drop this connector to 1.x (e.g. 1.4.0). -->
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.75</version>
</dependency>
Java code:
/**
 * Demo job: reads an initial snapshot of a MySQL database plus all subsequent
 * binlog change events via the Flink CDC connector, and prints each change
 * record to stdout.
 *
 * NOTE(review): host/port/user/password are hard-coded; externalize them
 * (program args or config) and avoid committing credentials to source control.
 */
public class FlinkCDC {
    public static void main(String[] args) throws Exception {
        // 1. Create the Flink streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a single ordered stream for this demo.
        env.setParallelism(1);

        // 2. Build the CDC SourceFunction via Flink-CDC.
        // (Renamed local from "SourceFunction": it clashed with the interface
        // name and violated lowerCamelCase for local variables.)
        DebeziumSourceFunction<String> sourceFunction = MySqlSource.<String>builder()
                .hostname("127.0.0.1")
                .port(3306)
                .databaseList("cron") // set captured database
                // .tableList("yk_rh.yourTableName") // set captured table
                .username("root")
                .password("123456")
                // initial(): take a full snapshot first, then continue streaming the binlog.
                .startupOptions(StartupOptions.initial())
                // Emits SourceRecord.toString() — NOT real JSON despite the original
                // comment; use JsonDebeziumDeserializationSchema for JSON output.
                .deserializer(new StringDebeziumDeserializationSchema())
                .build();

        DataStreamSource<String> stringDataStreamSource = env.addSource(sourceFunction);

        // 3. Print every change record (snapshot rows + binlog events).
        stringDataStreamSource.print();

        // 4. Launch the streaming job (blocks until the job terminates).
        env.execute("Print MySQL Snapshot + Binlog");
    }
}