Flink SQL 读 Kafka（reading from Kafka with Flink SQL）
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.types.Row
/**
 * Reads JSON records from a Kafka topic through Flink SQL and prints each row
 * to stdout as an append-only stream.
 *
 * create by LiuJinHe 2020/8/12
 */
object FlinkSqlReadKafka {
  def main(args: Array[String]): Unit = {
    // Local environment with the web UI for debugging; this needs the
    // flink-runtime-web dependency on the classpath. For a cluster deployment
    // use StreamExecutionEnvironment.getExecutionEnvironment instead.
    val streamEnv = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
    streamEnv.setParallelism(1)

    // Table environment backed by the Blink planner in streaming mode.
    val envSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()
    val streamTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(streamEnv, envSettings)

    // DDL for a temporary table over the Kafka connector: JSON-encoded
    // messages, consumption starting from the latest offsets.
    val createTableDdl =
      """
        |create temporary table kafkaTable (
        | user_name string,
        | user_id bigint,
        | item_id bigint
        |) with (
        | 'connector' = 'kafka',
        | 'topic' = 'flink_test',
        | 'properties.bootstrap.servers' = 'localhost:9092',
        | 'properties.group.id' = 'flink-test-group',
        | 'format' = 'json',
        | 'scan.startup.mode' = 'latest-offset'
        |)
        |""".stripMargin
    streamTableEnv.executeSql(createTableDdl)

    // Scan the registered table; equivalent to
    // streamTableEnv.sqlQuery("select * from kafkaTable").
    val kafkaTable = streamTableEnv.from("kafkaTable")

    // Convert back to a DataStream[Row] (append-only) so it can be printed.
    val rowStream = streamTableEnv.toAppendStream[Row](kafkaTable)
    rowStream.print()

    streamEnv.execute("flink sql read kafka")
  }
}
Flink SQL 读写 Kafka（reading from and writing back to Kafka with Flink SQL）
import org.apache.flink.api.common.restartstrategy.RestartStrategies
import org.apache.flink.streaming.api.{CheckpointingMode, TimeCharacteristic}
import org.apache.flink.streaming.api.environment.