sink 写入 kafka
2.4.2 将数据写入 kafka (Kafka Sink)
开启消费者
./bin/kafka-console-consumer.sh --bootstrap-server 192.168.23.97:9092 --topic kafkasink
开启生产者
./bin/kafka-console-producer.sh --broker-list 192.168.23.97:9092 --topic sensor
package com.flink.sink.study
import java.util.Properties
import com.flink.streamapi.study.SensorReading
import org.apache.flink.api.common.serialization.{SimpleStringEncoder, SimpleStringSchema}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer, FlinkKafkaProducer011}
object KafkaSink {

  /**
   * Flink job: reads raw sensor lines from the Kafka topic "sensor",
   * parses each line into a [[SensorReading]], and writes the rendered
   * records to the Kafka topic "kafkasink" (also printing them to stdout).
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Kafka connection settings, shared by the consumer (source) and producer (sink).
    val properties = new Properties()
    properties.put("bootstrap.servers", "192.168.23.97:9092")
    // Consumer group id used by the Kafka client for committing offsets;
    // without it offset commits back to Kafka fail.
    properties.put("group.id", "kafka-sink-study")

    // Source: a Kafka CONSUMER subscribed to the "sensor" topic.
    // (The original comment labelled this a producer — it is the consumer side.)
    val stream = env.addSource(
      new FlinkKafkaConsumer[String]("sensor", new SimpleStringSchema(), properties))

    // Parse each "id, timestamp, temperature" line into a SensorReading,
    // then render it back to its string form for the sink.
    val resStream = stream
      .map(line => {
        val fields = line.split(", ")
        SensorReading(fields(0), fields(1).toLong, fields(2).toDouble).toString
      })

    // Sink: a Kafka PRODUCER writing the transformed records to "kafkasink".
    resStream.addSink(
      new FlinkKafkaProducer[String]("kafkasink", new SimpleStringSchema(), properties))
    resStream.print()

    env.execute("kafka sink")
  }
}