1. Create a new Maven project: myflink
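If you are not using an IDE wizard, the skeleton can also be generated from the command line; a minimal sketch using the standard quickstart archetype (the groupId cn.alisa is an assumption inferred from the package name used in step 3):

mvn archetype:generate -DgroupId=cn.alisa -DartifactId=myflink -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false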
2. Add the following dependencies to pom.xml
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <flink.version>1.7.2</flink.version>
    <kafka.version>2.0.0</kafka.version>
</properties>
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.11</version>
        <scope>test</scope>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-scala -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-scala_2.11</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-streaming-scala -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-scala_2.11</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-clients -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-clients_2.11</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-shaded-hadoop-2-uber -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-shaded-hadoop-2-uber</artifactId>
        <version>2.4.1-9.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-kafka -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-kafka_2.11</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.11</artifactId>
        <version>${kafka.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>${kafka.version}</version>
    </dependency>
</dependencies>
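Note: a project created from a plain Maven archetype compiles only Java sources by default; to build the .scala file in step 3, a Scala compiler plugin is needed in the <build> section. A minimal sketch using scala-maven-plugin (the plugin version here is an assumption, adjust to your environment):

<build>
    <plugins>
        <plugin>
            <groupId>net.alchim31.maven</groupId>
            <artifactId>scala-maven-plugin</artifactId>
            <version>3.4.6</version>
            <executions>
                <execution>
                    <goals>
                        <goal>compile</goal>
                        <goal>testCompile</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>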
3. Create WriteKafka.scala
package cn.alisa.myflink.exp
import java.util.Properties
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerConfig
// Flink job that reads records from one Kafka topic and writes them to a new topic
object WriteKafka {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Consumer configuration for the source topic
    val prop = new Properties()
    prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.21.130:9092")
    prop.put(ConsumerConfig.GROUP_ID_CONFIG, "alisa")
    prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    val ds = env.addSource(
      // Read the raw topic with FlinkKafkaConsumer, decoding each record as a
      // String via SimpleStringSchema, starting from the earliest offset
      new FlinkKafkaConsumer[String](
        // "user_friends_raw",
        "event_attendees_raw",
        new SimpleStringSchema(),
        prop
      ).setStartFromEarliest()
    )
    /* // Processing for user_friends_raw: drop lines that end with ","
       // (users with no friends), then emit one "userId,friendId" record per friend
    val ff: DataStream[String] = ds.filter(line => {
      val reg = ",$".r
      val iter = reg.findAllMatchIn(line)
      !iter.hasNext
    }).flatMap(line => {
      val info = line.split(",")
      info(1).split(" ").map(info(0) + "," + _)
    }) */
    // Processing for event_attendees_raw: each line is
    // "eventId,yesList,maybeList,invitedList,noList"; flatten it into one
    // (eventId, userId, status) tuple per user, then join each tuple as CSV
    val ff = ds.flatMap(line => {
      val info = line.split(",", -1) // -1 keeps trailing empty fields
      val yes = info(1).split(" ").map((info(0), _, "yes"))
      val maybe = info(2).split(" ").map((info(0), _, "maybe"))
      val invited = info(3).split(" ").map((info(0), _, "invited"))
      val no = info(4).split(" ").map((info(0), _, "no"))
      yes ++ maybe ++ invited ++ no
    }).filter(_._2 != "").map(_.productIterator.mkString(","))
    // Producer configuration for the sink topic. No key/value serializer
    // classes are set here: SimpleStringSchema below already turns each record
    // into bytes, and Flink's Kafka producer writes those bytes as-is
    val wprop = new Properties()
    wprop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.21.130:9092")
    // Default parallelism for operators created after this point (the sink)
    env.setParallelism(5)
    ff.addSink(new FlinkKafkaProducer[String](
      // "user_friends_ff",
      "event_attendees_ff",
      new SimpleStringSchema(),
      wprop
    )).name("flink-write")
    env.execute("write")
  }
}
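The flatten logic in step 3 can be sanity-checked outside Flink; a minimal sketch with a made-up event_attendees_raw line (all ids here are hypothetical):

object ParseCheck {
  def main(args: Array[String]): Unit = {
    // Hypothetical input: eventId, then space-separated user ids for
    // yes / maybe / invited / no (the trailing "no" list is empty here)
    val line = "123,111 222,333,444,"
    val info = line.split(",", -1)
    val yes = info(1).split(" ").map((info(0), _, "yes"))
    val maybe = info(2).split(" ").map((info(0), _, "maybe"))
    val invited = info(3).split(" ").map((info(0), _, "invited"))
    val no = info(4).split(" ").map((info(0), _, "no"))
    (yes ++ maybe ++ invited ++ no)
      .filter(_._2 != "")
      .map(_.productIterator.mkString(","))
      .foreach(println)
    // prints:
    // 123,111,yes
    // 123,222,yes
    // 123,333,maybe
    // 123,444,invited
  }
}

Once the job is running, the sink topic can be checked with the console consumer that ships with Kafka:

kafka-console-consumer.sh --bootstrap-server 192.168.21.130:9092 --topic event_attendees_ff --from-beginning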