Use Flink to read data from a Kafka topic, apply a few transformations, and write the result to a new Kafka topic.

1. Create a new Maven project: myflink

2. Add the dependencies to pom.xml

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <flink.version>1.7.2</flink.version>
    <kafka.version>2.0.0</kafka.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-scala -->
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-scala_2.11</artifactId>
      <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-streaming-scala -->
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-streaming-scala_2.11</artifactId>
      <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-clients -->
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-clients_2.11</artifactId>
      <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-shaded-hadoop-2-uber -->
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-shaded-hadoop-2-uber</artifactId>
      <version>2.4.1-9.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-kafka -->
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-connector-kafka_2.11</artifactId>
      <version>${flink.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka_2.11</artifactId>
      <version>${kafka.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>${kafka.version}</version>
    </dependency>
  </dependencies>
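
Since the job in step 3 is written in Scala, the project also needs a Scala compiler plugin so Maven can build the .scala sources. The <build> fragment below is a minimal sketch; the scala-maven-plugin version is an assumption, adjust it to your environment:

  <build>
    <plugins>
      <!-- compiles src/main/scala; version 3.2.2 is an assumption -->
      <plugin>
        <groupId>net.alchim31.maven</groupId>
        <artifactId>scala-maven-plugin</artifactId>
        <version>3.2.2</version>
        <executions>
          <execution>
            <goals>
              <goal>compile</goal>
              <goal>testCompile</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>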

3. Create WriteKafka.scala

package cn.alisa.myflink.exp

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerConfig

// Flink job: read data from one Kafka topic, transform it, and write it into a new Kafka topic
object WriteKafka {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val prop = new Properties()
    prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.21.130:9092")
    prop.put(ConsumerConfig.GROUP_ID_CONFIG, "alisa")
    prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    val ds = env.addSource(
      // Read from Kafka with FlinkKafkaConsumer, using SimpleStringSchema to deserialize each record as a string
      new FlinkKafkaConsumer[String](
//        "user_friends_raw",
        "event_attendees_raw",
        new SimpleStringSchema(),
        prop
      ).setStartFromEarliest()
    )

/*  // Alternative: process user_friends_raw instead. Keep lines that do not end with a
    // trailing comma (i.e. the friend list is not empty), then explode
    // "user,friend1 friend2 ..." into one "user,friend" record per friend
    val ff: DataStream[String] = ds.filter(line => {
      var reg = ",$".r
      val iter = reg.findAllMatchIn(line)
      !iter.hasNext
    }).flatMap(line => {
      var info = line.split(",")
      info(1).split(" ").map(info(0) + "," + _)
    })*/

    // Transform event_attendees_raw: each raw line is "event,yes,maybe,invited,no",
    // where each field after the event id is a space-separated list of user ids
    val ff = ds.flatMap(line => {
      val info = line.split(",", -1)
      val yes = info(1).split(" ").map((info(0), _, "yes"))
      val maybe = info(2).split(" ").map((info(0), _, "maybe"))
      val invited = info(3).split(" ").map((info(0), _, "invited"))
      val no = info(4).split(" ").map((info(0), _, "no"))
      yes ++ maybe ++ invited ++ no
    }).filter(_._2 != "").map(_.productIterator.mkString(","))

    val wprop = new Properties()
    wprop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.21.130:9092")
    // No key/value serializer needs to be configured here: FlinkKafkaProducer serializes
    // each record itself through the SimpleStringSchema passed to the sink below
    env.setParallelism(5)
    ff.addSink(new FlinkKafkaProducer[String](
//      "user_friends_ff",
      "event_attendees_ff",
      new SimpleStringSchema(),
      wprop
    )).name("flink-write")
    env.execute("write")
  }
}
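
To see what the event_attendees_raw transformation produces, the following standalone sketch applies the same flatMap/filter/map logic to one hypothetical input line (the event and user ids are invented for illustration; the assumed raw format is "event,yes,maybe,invited,no", where each field after the event id is a space-separated list of user ids):

object TransformExample {
  def main(args: Array[String]): Unit = {
    val line = "e100,u1 u2,u3,,u4"          // hypothetical raw record (empty "invited" field)
    val info = line.split(",", -1)
    val yes = info(1).split(" ").map((info(0), _, "yes"))
    val maybe = info(2).split(" ").map((info(0), _, "maybe"))
    val invited = info(3).split(" ").map((info(0), _, "invited"))
    val no = info(4).split(" ").map((info(0), _, "no"))
    (yes ++ maybe ++ invited ++ no)
      .filter(_._2 != "")                   // drop entries with an empty user id
      .map(_.productIterator.mkString(","))
      .foreach(println)
    // prints:
    // e100,u1,yes
    // e100,u2,yes
    // e100,u3,maybe
    // e100,u4,no
  }
}

Each output line has the shape "event,user,status", which is what the job writes to the event_attendees_ff topic.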
