Flink SQL (3): Defining processtime from a DataStream and in DDL

Code

package flinkSql

import java.sql.Timestamp

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{EnvironmentSettings, Table}
import org.apache.flink.table.api.scala._

case class SensorReading(id: String, timestamp: Long, temperature: Double)

object FlinkSqlLession3_DatastreamToSqlProcessTime {
  def main(args: Array[String]): Unit = {
    val executionEnvironment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // The DDL approach requires the Blink planner, so create the streaming table environment with explicit Blink settings
    val blinkStreamSettings: EnvironmentSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val tableEnvironment: StreamTableEnvironment = StreamTableEnvironment.create(executionEnvironment, blinkStreamSettings)


    //    // 1. Define processtime on the DataStream: this effectively appends a Timestamp-typed column, then the stream is registered as a table and queried with SQL (an uncommented sketch of this approach follows the full program below)
    //    val stream2: DataStream[String] = executionEnvironment.socketTextStream("127.0.0.1", 1111)
    //    val transforStream: DataStream[SensorReading] = stream2.map(data => {
    //      val tmpList: Array[String] = data.split(",")
    //      SensorReading(tmpList(0), tmpList(1).toLong, tmpList(2).toDouble)
    //    })
    //    tableEnvironment.createTemporaryView("sensorTable", transforStream, 'id, 'temperature, 'processtime.proctime)

    // 2. Define process time in the table DDL, as a computed column using PROCTIME()
    val sourceDDl: String =
      """
        | create table sensorTable(
        | id varchar(20),
        | timestampdata bigint,
        | temperature double,
        | processtime as PROCTIME()
        | ) with (
        | 'connector.type'='filesystem',
        | 'connector.path'='/Users/zha/untitled/src/main/resources/sensorReading.txt',
        | 'format.type'='csv'
        | )
        |""".stripMargin

    tableEnvironment.sqlUpdate(sourceDDl)


    val sql: String = "select id,temperature,processtime from sensorTable where id='sensor_1'"
    val sqlTable: Table = tableEnvironment.sqlQuery(sql)
    sqlTable.toAppendStream[(String, Double, Timestamp)].print("sqlResult")
    // Output:
    //sqlResult:11> (sensor_1,32.9,2021-10-30 08:15:01.427)
    //sqlResult:12> (sensor_1,23.0,2021-10-30 08:15:28.186)


    executionEnvironment.execute("flink sql")
  }
}
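
For reference, here is approach 1 uncommented, as it would sit inside main in place of the DDL (it reuses the executionEnvironment and tableEnvironment defined above; since both approaches register a table named sensorTable, the post keeps only one of them active at a time). This is a minimal sketch with slightly renamed local variables, assuming the same socket source and the same id,timestamp,temperature input format as the commented-out code:

    // Approach 1: define the processing-time attribute while registering the DataStream as a view
    val socketStream: DataStream[String] = executionEnvironment.socketTextStream("127.0.0.1", 1111)
    val sensorStream: DataStream[SensorReading] = socketStream.map(data => {
      val fields: Array[String] = data.split(",")
      SensorReading(fields(0), fields(1).toLong, fields(2).toDouble)
    })
    // 'processtime.proctime appends a processing-time column after the selected fields
    tableEnvironment.createTemporaryView("sensorTable", sensorStream, 'id, 'temperature, 'processtime.proctime)
    // The query used above then works unchanged:
    //   select id, temperature, processtime from sensorTable where id='sensor_1'

The sample input and output below were produced with the DDL-based program (approach 2).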

Input:

sensor_1,1547718199,32.9
sensor_1,1547718213,23

Output:

sqlResult:11> (sensor_1,32.9,2021-10-30 08:15:01.427)
sqlResult:12> (sensor_1,23.0,2021-10-30 08:15:28.186)
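
A processing-time attribute such as processtime is mainly useful for time-windowed operations. As an illustration only (this query is not part of the original post, and the 10-second window size is an arbitrary choice), a tumbling-window count per sensor over the DDL-defined table could look like this:

    // Hypothetical follow-up query: count readings per sensor in 10-second processing-time windows.
    // Group-window results are emitted once per closed window (append-only), so toAppendStream still applies.
    val windowSql: String =
      """
        | select id,
        |        count(*) as cnt,
        |        TUMBLE_END(processtime, INTERVAL '10' SECOND) as window_end
        | from sensorTable
        | group by id, TUMBLE(processtime, INTERVAL '10' SECOND)
        |""".stripMargin
    val windowTable: Table = tableEnvironment.sqlQuery(windowSql)
    windowTable.toAppendStream[(String, Long, Timestamp)].print("windowResult")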
 
