sensor_1,1547718199,35.8
sensor_6,1547718201,15.4
sensor_7,1547718202,6.7
sensor_10,1547718205,38.1
sensor_1,1547718129,29.8
sensor_1,1547718158,5.8
sensor_1,1547718140,40.8
sensor_1,1547718111,11.8
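The sensor.txt sample above has three comma-separated columns: sensor id, timestamp, and temperature reading. The program below registers this file as a Table API source, filters the readings of sensor_1, computes a per-sensor row count, and writes the filtered result to another CSV file.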
package com.tableApiTest
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{DataTypes, Table}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.descriptors.{Csv, FileSystem, Schema}
object FileOutPutTest {
  def main(args: Array[String]): Unit = {
    // create the streaming and table environments
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tableEnv = StreamTableEnvironment.create(env)
    val filePath = "D:\\workspace\\ideastudy\\flinkstudy\\src\\main\\resources\\sensor.txt"
    // register the source CSV file as the table "inputTable"
    tableEnv.connect(new FileSystem().path(filePath))
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temp", DataTypes.DOUBLE())
      )
      .createTemporaryTable("inputTable")
    val sensorTable = tableEnv.from("inputTable")
    // simple transformation: project and filter
    val resultTable = sensorTable
      .select("id,temp")
      .filter("id='sensor_1'")
    // aggregation: count rows per sensor id
    val aggTable = sensorTable
      .groupBy("id") // group by id
      .select('id, 'id.count as 'count)
    // resultTable.toAppendStream[(String, Double)].print("result")
    // aggTable.toRetractStream[(String, Long)].print("agg")
    // write to a file: register the output table, then insert into it
    val output = "D:\\workspace\\ideastudy\\flinkstudy\\src\\main\\resources\\output.txt"
    tableEnv.connect(new FileSystem().path(output))
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("temp", DataTypes.DOUBLE())
      )
      .createTemporaryTable("outputTable")
    resultTable.insertInto("outputTable")
    // The line below throws an error: insertInto on a file sink goes through an
    // AppendStreamTableSink, which only supports insert (append) operations, not
    // updates; in addition, the table's schema has to match the registered output
    // table's schema for the write to succeed.
    aggTable.insertInto("outputTable")
    env.execute("file sink")
  }
}
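As the commented-out line in the program hints, the update-style aggregation result can still be inspected without a file sink by converting it to a retract stream. A minimal sketch, to be placed before env.execute in main:

// Each emitted element carries a Boolean retract flag plus the (id, count) row;
// false marks the retraction of a previously emitted count.
aggTable.toRetractStream[(String, Long)].print("agg")

With the sample data above, the filtered result written to output.txt should contain roughly the lines sensor_1,35.8 / sensor_1,29.8 / sensor_1,5.8 / sensor_1,40.8 / sensor_1,11.8.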