flink默认支持了多种sink，如kafka和es；如果没有flink默认支持的，可以使用Apache Bahir提供的连接器
一、连接 Kafka
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.11_2.12</artifactId>
<version>1.10.1</version>
</dependency>
import org.apache.flink.api.common.serialization.{
SimpleStringEncoder, SimpleStringSchema}
import org.apache.flink.streaming.api.scala.{
DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011
import org.apache.flink.streaming.connectors.kafka.internal.FlinkKafkaProducer
import org.apache.flink.api.scala._
import org.apache.flink.core.fs.Path
import java.util.Properties
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
object AddSink {
def main(args: Array[String]): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val read_text = env.readTextFile("C:\\Users\\Administrator\\Desktop\\文档\\01.txt")
val data:DataStream[String]=read_text.map(x=>{
val gf = x.split(" ")
hotel(gf(0),gf(1),gf(2)).toString
})