sensor.txt(示例输入数据,每行格式为: 传感器id,时间戳,温度):
sensor_1,1547718199,35.8
sensor_6,1547718201,15.4
sensor_7,1547718202,6.7
sensor_10,1547718205,38.1
sensor_1,1547718207,37.2
sensor_1,1547718212,33.5
sensor_1,1547718215,38.1
TableAPI 示例(用 Table API 的 select/filter 做查询):
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{DataTypes, Table}
import org.apache.flink.table.descriptors._
/**
 * Reads sensor readings from a local CSV file (sensor.txt), registers them as
 * a temporary table, and uses the Table API (select/filter) to keep only the
 * rows whose id is "sensor_1", printing the result as an append stream.
 * NOTE(review): an earlier comment said "kafka", but the connector below is
 * FileSystem — this job reads from the file at filePath.
 */
object TableApiTest4 {
  def main(args: Array[String]): Unit = {
    // Single-task execution so the printed rows keep the input file's order.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tableEnv = StreamTableEnvironment.create(env)

    // Source file; each record is comma-separated: id,timestamp,temperature.
    val filePath = "D:\\20-Flink\\FlinkTutorial\\src\\main\\resources\\sensor.txt"

    // Column names and types, declared in the same order as the CSV columns.
    val sensorSchema = new Schema()
      .field("id", DataTypes.STRING())
      .field("timestamp", DataTypes.BIGINT())
      .field("temperature", DataTypes.DOUBLE())

    // Register the file source under the table name "inputTable".
    tableEnv
      .connect(new FileSystem().path(filePath))
      .withFormat(new Csv()) // comma-separated format matching the raw records
      .withSchema(sensorSchema)
      .createTemporaryTable("inputTable")

    // Table API query: project id/temperature, then keep only sensor_1
    // (note the triple '===' for column equality in the expression DSL).
    val sensorTable: Table = tableEnv.from("inputTable")
    val resultTable: Table = sensorTable
      .select('id, 'temperature)
      .filter('id === "sensor_1")

    resultTable.toAppendStream[(String, Double)].print("TableAPI")
    env.execute("table api test job")
  }
}
结果:
TableAPI> (sensor_1,35.8)
TableAPI> (sensor_1,37.2)
TableAPI> (sensor_1,33.5)
TableAPI> (sensor_1,38.1)
FlinkSql 示例(同样的查询改用 SQL 语句实现):
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{DataTypes, Table}
import org.apache.flink.table.descriptors._
/**
 * Reads sensor readings from a local CSV file (sensor.txt), registers them as
 * a temporary table, and runs a plain SQL query that keeps only the sensor_1
 * rows, printing the result as an append stream.
 * NOTE(review): the original comment said "kafka", but the connector below is
 * FileSystem — this job reads from the file at filePath.
 */
object TableApiTest4 {
def main(args: Array[String]): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
// Parallelism 1 so the printed rows preserve the input file's order.
env.setParallelism(1)
val tableEnv = StreamTableEnvironment.create(env)
val filePath = "D:\\20-Flink\\FlinkTutorial\\src\\main\\resources\\sensor.txt"
tableEnv.connect(new FileSystem().path(filePath))
.withFormat(new Csv()) // CSV format: comma-separated records id,timestamp,temperature
// Declare the name and type of each column, in CSV column order.
.withSchema(new Schema()
.field("id", DataTypes.STRING())
.field("timestamp", DataTypes.BIGINT())
.field("temperature", DataTypes.DOUBLE())
)
// Register the source under the table name "inputTable".
.createTemporaryTable("inputTable")
// Simple SQL query: project id/temperature and keep only sensor_1 rows.
val resultSqlTable: Table = tableEnv.sqlQuery(
"""
|select id, temperature
|from inputTable
|where id = 'sensor_1'
""".stripMargin)
resultSqlTable.toAppendStream[(String, Double)].print("FlinkSql")
env.execute("table api test job")
}
}
结果:
FlinkSql> (sensor_1,35.8)
FlinkSql> (sensor_1,37.2)
FlinkSql> (sensor_1,33.5)
FlinkSql> (sensor_1,38.1)