sensor_1,1547718199,35.8
sensor_6,1547718201,15.4
sensor_7,1547718202,6.7
sensor_10,1547718205,38.1
sensor_1,1547718129,29.8
sensor_1,1547718158,5.8
sensor_1,1547718140,40.8
package com.tableApiTest
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.api.scala._
import org.apache.flink.table.descriptors.{Csv, Kafka, Schema}
object KafkaoutputTest {
  /**
   * Reads sensor records (id, timestamp, temperature) from the Kafka topic
   * "sensor", filters for sensor_1, and writes (id, temp) pairs back to the
   * Kafka topic "testout" via the Table API connector descriptors.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the streaming environment and its table environment.
    //    Parallelism 1 keeps output ordering deterministic for this demo.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tableEnv = StreamTableEnvironment.create(env)

    // 2. Register a Kafka source table named "kafkaInputTable".
    tableEnv.connect(new Kafka()
      .version("0.11")
      .topic("sensor")
      .property("zookeeper.connect", "localhost:2181")
      .property("bootstrap.servers", "localhost:9092")
    )
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE())
      )
      .createTemporaryTable("kafkaInputTable")

    // BUG FIX: the table was registered as "kafkaInputTable" above, but the
    // original code looked up "inputTable", which would fail at runtime.
    val sensorTable = tableEnv.from("kafkaInputTable")

    // 3. Simple transformation: project and filter.
    // BUG FIX: the source schema has no field "temp" — the field is
    // "temperature". Alias it to "temp" so it matches the sink schema below.
    val resultTable = sensorTable
      .select('id, 'temperature as 'temp)
      .filter('id === "sensor_1")

    // 4. Aggregation example: count of records per sensor id.
    // NOTE: groupBy produces an updating (retract) table; it CANNOT be
    // written to the append-only Kafka sink below, so it is not emitted here.
    val aggTable = sensorTable
      .groupBy('id) // group by sensor id
      .select('id, 'id.count as 'count)

    // 5. Register a Kafka sink table named "kafkaOutputTable".
    tableEnv.connect(new Kafka()
      .version("0.11")
      .topic("testout")
      .property("zookeeper.connect", "localhost:2181")
      .property("bootstrap.servers", "localhost:9092")
    )
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("temp", DataTypes.DOUBLE())
      )
      .createTemporaryTable("kafkaOutputTable")

    // 6. Emit the append-only result table into the Kafka sink and run the job.
    resultTable.insertInto("kafkaOutputTable")
    env.execute("kafka out put")
  }
}