一、基于 DataStream 转换
sensor_1,1547718199,35.8
sensor_6,1547718201,15.4
sensor_7,1547718202,6.7
sensor_10,1547718205,38.1
sensor_1,1547718129,29.8
sensor_1,1547718158,5.8
sensor_1,1547718140,40.8
sensor_1,1547718111,11.8
package com.tableApiTest
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.scala._
/**
 * One sensor reading parsed from a CSV line ("id,timestamp,temperature").
 *
 * @param id          sensor identifier, e.g. "sensor_1"
 * @param timestamp   epoch seconds of the reading
 * @param temperature measured temperature value
 */
case class SensorReading8(
  id: String,
  timestamp: Long,
  temperature: Double
)
object Example1 {

  /**
   * Reads sensor records from a local text file, maps them to a typed
   * DataStream of [[SensorReading8]], and runs the same query twice:
   * once with the Table API expression DSL and once with plain SQL.
   * Both keep only (id, temperature) for sensor "sensor_1".
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // NOTE(review): hard-coded absolute Windows path — runs only on the author's machine.
    val input = "D:\\workspace\\ideastudy\\flinkstudy\\src\\main\\scala\\com\\apitest\\sensor.txt"
    val inputStream = env.readTextFile(input)

    // 1. Parse each "id,timestamp,temperature" CSV line into the case class.
    val dataStream = inputStream.map(data => {
      val arr = data.split(",")
      SensorReading8(arr(0), arr(1).toLong, arr(2).toDouble)
    })

    // Create the table execution environment on top of the stream environment.
    val tableEnv = StreamTableEnvironment.create(env)

    // Derive a Table directly from the DataStream (field names come from the case class).
    val dataTable = tableEnv.fromDataStream(dataStream)

    // 1. Table API version: project and filter using the string expression DSL.
    val resultTable = dataTable
      .select("id,temperature")
      .filter("id = 'sensor_1'")
    // A Table must be converted back to a DataStream before printing.
    // resultTable.toAppendStream[(String,Double)].print()

    // 2. Same query expressed in SQL against a temporary view of the table.
    tableEnv.createTemporaryView("dataTable", dataTable)
    val sql: String = "select id,temperature from dataTable where id='sensor_1'"
    val resultSqlTable = tableEnv.sqlQuery(sql)
    // Convert to an append stream before printing.
    resultSqlTable.toAppendStream[(String, Double)].print()

    // Fix: job name was misspelled "exapmle" in the original.
    env.execute("table api example")
  }
}
二、直接创建 Table
package com.tableApiTest
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings, Table, TableEnvironment}
import org.apache.flink.table.api.scala.{BatchTableEnvironment, StreamTableEnvironment}
import org.apache.flink.table.descriptors.{FileSystem, OldCsv, OldCsvValidator, Schema}
import sun.plugin.cache.OldCacheEntry
import org.apache.flink.table.api.scala._
object TableApiTest {

  /**
   * Demonstrates registering external systems as tables with the (legacy)
   * connector/descriptor API:
   *  - a local CSV file via the FileSystem connector + OldCsv format,
   *  - a Kafka 0.11 topic via the Kafka connector + Csv format,
   * then querying the file-backed table with both the Table API and SQL.
   *
   * NOTE(review): the Kafka part needs a broker/Zookeeper on localhost;
   * without one the job fails at execution time.
   */
  def main(args: Array[String]): Unit = {
    // Fix: Kafka and Csv descriptors were used but never imported anywhere
    // in this file, so it did not compile. Scala allows method-local imports.
    import org.apache.flink.table.descriptors.{Csv, Kafka}

    // 1. Create the stream and table environments; parallelism 1 keeps the
    // printed output in deterministic order for the demo.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tableEnv = StreamTableEnvironment.create(env)

    // 2. Connect to external systems, read data, and register tables.
    // 2.1 File source. NOTE(review): hard-coded absolute Windows path.
    val filePath = "D:\\workspace\\ideastudy\\flinkstudy\\src\\main\\resources\\sensor.txt"
    tableEnv.connect(new FileSystem().path(filePath))
      .withFormat(new OldCsv())
      .withSchema(new Schema()
        // NOTE(review): "timestamp" is a reserved word in some SQL planners;
        // it works with the old planner used here, but consider renaming.
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE())
      )
      .createTemporaryTable("inputTable")
    val inputTable: Table = tableEnv.from("inputTable")
    inputTable.toAppendStream[(String, Long, Double)].print()

    // 2.2 Kafka source with the same schema, CSV-encoded messages.
    tableEnv.connect(new Kafka()
      .version("0.11")
      .topic("sensor")
      .property("zookeeper.connect", "localhost:2181")
      .property("bootstrap.servers", "localhost:9092")
    )
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE())
      )
      .createTemporaryTable("kafkaInputTable")
    val kafkaInputTable: Table = tableEnv.from("kafkaInputTable")
    kafkaInputTable.toAppendStream[(String, Long, Double)].print()

    // 3. Query transformations on the file-backed table.
    // 3.1 Table API with the string expression DSL.
    val resultTable = inputTable
      .select("id,temperature")
      .filter("id = 'sensor_1'")
    resultTable.toAppendStream[(String, Double)].print("result")

    // 3.2 Equivalent SQL query.
    val resultSqlTable = tableEnv.sqlQuery(
      """
        |select id,temperature
        |from inputTable
        |where id='sensor_1'
      """.stripMargin
    )
    resultSqlTable.toAppendStream[(String, Double)].print("sql")

    env.execute("table api test")
  }
}