Flink Sink
一、 Flink写入不同文档
二、封装方法,从kafka读取,再写入kafka
三、自定义数据源写入Mysql
四、写入Redis
创建Scala-Maven 工程
导入pom依赖(Flink所有可能用到的依赖)
<properties>
<scala.version>2.12.10</scala.version>
<mysql.version>8.0.11</mysql.version>
<flink.version>1.13.0</flink.version>
<encoding>UTF-8</encoding>
</properties>
<dependencies>
<!-- 导入scala的依赖 -->
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<!-- flink scala -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- flink图计算 -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-gelly_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.version}</version>
</dependency>
<!-- flink连接kafka -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- flink连接mysql -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- 状态后端存储：RocksDB
     另外还有：本地（内存/文件系统）
     HDFS
-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-statebackend-rocksdb_2.12</artifactId>
<version>${
flink.version}<