1. Kafka sink
import java.util.Properties
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer011, FlinkKafkaProducer011}
/**
 * Reads strings from the Kafka topic "test" and writes them back to the same
 * topic on localhost:9092, also printing each record to stdout.
 *
 * Fix: the key/value deserializer class was misspelled as
 * "StringDeserialization" — the actual kafka-clients class is
 * `org.apache.kafka.common.serialization.StringDeserializer`. (Flink's
 * `SimpleStringSchema` performs the actual deserialization, but a wrong class
 * name here would fail with ClassNotFoundException if the property is used.)
 */
object KafkaExample {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Consumer configuration for the Kafka source.
    val props = new Properties()
    props.put("bootstrap.servers", "localhost:9092")
    props.put("group.id", "consumer-group")
    props.put(
      "key.deserializer",
      "org.apache.kafka.common.serialization.StringDeserializer"
    )
    props.put(
      "value.deserializer",
      "org.apache.kafka.common.serialization.StringDeserializer"
    )
    // Start from the latest offset when no committed offset exists.
    props.put("auto.offset.reset", "latest")

    val stream = env
      .addSource(
        new FlinkKafkaConsumer011[String](
          "test",
          new SimpleStringSchema(),
          props
        )
      )

    // Echo every record back to the same topic.
    stream.addSink(
      new FlinkKafkaProducer011[String](
        "localhost:9092",
        "test",
        new SimpleStringSchema()
      )
    )
    stream.print()
    env.execute()
  }
}
2. Redis sink
import com.atguigu.day2.{SensorReading, SensorSource}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}
/**
 * Writes sensor readings into a Redis hash named "sensor":
 * HSET sensor <sensor-id> <temperature>.
 */
object SinkToRedis {

  /** Maps a SensorReading onto the Redis HSET command. */
  class MyRedisMapper extends RedisMapper[SensorReading] {
    // Command + the name of the hash ("sensor") holding all readings.
    override def getCommandDescription: RedisCommandDescription =
      new RedisCommandDescription(RedisCommand.HSET, "sensor")

    // Hash field: the sensor id.
    override def getKeyFromData(reading: SensorReading): String =
      reading.id

    // Hash value: the temperature rendered as a string.
    override def getValueFromData(reading: SensorReading): String =
      reading.temperature.toString
  }

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Jedis pool pointing at a local Redis instance.
    val jedisConfig = new FlinkJedisPoolConfig.Builder()
      .setHost("127.0.0.1")
      .build()

    val sensorStream = env.addSource(new SensorSource)
    sensorStream.addSink(new RedisSink[SensorReading](jedisConfig, new MyRedisMapper))

    env.execute()
  }
}
3. MySQL sink (JDBC)
import java.sql.{Connection, DriverManager, PreparedStatement}
import com.atguigu.day2.{SensorReading, SensorSource}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
/**
 * Upserts sensor readings into the MySQL table `temperatures(sensor, temp)`:
 * try an UPDATE first; if no row was affected, INSERT a new one.
 *
 * Fix: `close()` previously dereferenced the statements/connection
 * unconditionally, throwing NPE if `open()` failed partway (e.g. the
 * connection could not be established). It now guards each resource.
 */
object SinkToMySQL {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val stream = env.addSource(new SensorSource)
    stream.addSink(new MyJdbcSink)
    env.execute()
  }

  /** Rich sink holding one JDBC connection and two prepared statements per subtask. */
  class MyJdbcSink extends RichSinkFunction[SensorReading] {
    var conn: Connection = _
    var insertStmt: PreparedStatement = _
    var updateStmt: PreparedStatement = _

    /** Opens the connection and prepares the upsert statements once per subtask. */
    override def open(parameters: Configuration): Unit = {
      // NOTE(review): credentials are hard-coded for the example; externalize
      // them (e.g. via job parameters) in real code.
      conn = DriverManager.getConnection(
        "jdbc:mysql://localhost:3306/test",
        "root",
        "root"
      )
      insertStmt = conn.prepareStatement(
        "INSERT INTO temperatures (sensor, temp) VALUES (?, ?)"
      )
      updateStmt = conn.prepareStatement(
        "UPDATE temperatures SET temp = ? WHERE sensor = ?"
      )
    }

    /** Upsert: UPDATE first; fall back to INSERT when no row matched. */
    override def invoke(value: SensorReading, context: SinkFunction.Context[_]): Unit = {
      updateStmt.setDouble(1, value.temperature)
      updateStmt.setString(2, value.id)
      updateStmt.execute()
      if (updateStmt.getUpdateCount == 0) {
        insertStmt.setString(1, value.id)
        insertStmt.setDouble(2, value.temperature)
        insertStmt.execute()
      }
    }

    /** Releases JDBC resources; null-safe in case open() failed partway. */
    override def close(): Unit = {
      if (insertStmt != null) insertStmt.close()
      if (updateStmt != null) updateStmt.close()
      if (conn != null) conn.close()
    }
  }
}
4. Elasticsearch sink
import java.util
import com.atguigu.day2.{SensorReading, SensorSource}
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink
import org.apache.http.HttpHost
import org.elasticsearch.client.Requests
/**
 * Writes each sensor reading into the Elasticsearch index "sensor" as a
 * single-field document {"data": <reading.toString>}, flushing every 10
 * buffered actions.
 */
object SinkToES {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    val readings = env.addSource(new SensorSource)

    // Cluster endpoints — a single local node here.
    val hosts = new util.ArrayList[HttpHost]()
    hosts.add(new HttpHost("localhost", 9200))

    // Converts one reading into an index request and hands it to the indexer.
    val sinkFunction = new ElasticsearchSinkFunction[SensorReading] {
      override def process(reading: SensorReading,
                           ctx: RuntimeContext,
                           indexer: RequestIndexer): Unit = {
        val doc = new util.HashMap[String, String]()
        doc.put("data", reading.toString)
        indexer.add(
          Requests
            .indexRequest()
            .index("sensor")
            .source(doc)
        )
      }
    }

    val builder = new ElasticsearchSink.Builder[SensorReading](hosts, sinkFunction)
    // Flush after 10 buffered actions so documents show up promptly.
    builder.setBulkFlushMaxActions(10)

    readings.addSink(builder.build())
    env.execute()
  }
}