Continuing with Flink Table ...
This post is a bit of a grab bag: it's all examples. Honestly, in real-world development it's almost always Flink SQL, so here we'll just walk through the hand-written Table API quickly and then put the focus on SQL.
All the hand-written examples are below:
1)
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{Table, TableEnvironment}
import org.apache.flink.table.api.scala._

object Over_window {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // todo create the table environment
    val tableEnv = TableEnvironment.getTableEnvironment(env)
    val stream = env.fromElements(("aa", 192L), ("aa", 192L), ("bb", 112L))
    // declare a processing-time attribute so the Over window has a time attribute to order by
    // (ordering by a rowtime attribute would require event-time timestamps and watermarks on the source)
    val table: Table = tableEnv.fromDataStream(stream, 'id, 'time, 'proctime.proctime)

    // todo the four main parameters: partitionBy, orderBy, preceding (and following, the opposite of preceding)
    table.window(Over partitionBy 'id orderBy 'proctime preceding UNBOUNDED_RANGE as 'window)
      .select('id, 'time.sum over 'window, 'time.max over 'window)

    // todo aggregations over an Over window
    val rs: Table = table
      .window(Over partitionBy 'id orderBy 'proctime preceding UNBOUNDED_RANGE as 'window)
      .select('id, 'time.avg over 'window, 'time.max over 'window)

    // todo deduplication
    val distinctRs: Table = table.distinct()
  }
}
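The code above only builds Table objects and never emits or executes anything. Below is a quick sketch of how an Over-window result could be turned back into a DataStream and printed, assuming the same Flink 1.7/1.8-style Scala Table API and the same field names; the bounded window of 2 preceding rows, the object name Over_window_sink, and the alias 'sumTime are illustrative choices, not from the original.

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{Table, TableEnvironment}
import org.apache.flink.table.api.scala._

object Over_window_sink {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tableEnv = TableEnvironment.getTableEnvironment(env)

    val stream = env.fromElements(("aa", 192L), ("aa", 192L), ("bb", 112L))
    val table: Table = tableEnv.fromDataStream(stream, 'id, 'time, 'proctime.proctime)

    // bounded Over window: each row aggregates over itself plus the 2 preceding rows of the same id
    val bounded: Table = table
      .window(Over partitionBy 'id orderBy 'proctime preceding 2.rows as 'w)
      .select('id, 'time.sum over 'w as 'sumTime)

    // Over-window aggregations are append-only, so toAppendStream is enough here
    val out: DataStream[(String, Long)] = bounded.toAppendStream[(String, Long)]
    out.print()

    env.execute("over window sink sketch")
  }
}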
2)
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{Table, TableEnvironment}
import org.apache.flink.table.api.scala._

object Table_API {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // todo create the table environment
    val tableEnv = TableEnvironment.getTableEnvironment(env)
    val stream = env.fromElements(("aa", 192L), ("aa", 192L))
    // fields are mapped by position: the String becomes 'id, the Long becomes 'name
    val table: Table = tableEnv.fromDataStream(stream, 'id, 'name)
    tableEnv.registerDataStream("table2", stream, 'myString, 'myLong)
    // todo run a query and then convert the result back to a stream
    val rs: DataStream[(Boolean, (String, Long))] = table.groupBy
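The second example is cut off right at the groupBy, but the declared result type DataStream[(Boolean, (String, Long))] already hints at where it is going: a grouped aggregation produces an updating table, so it has to come back as a retract stream, where the Boolean flag marks an add (true) versus a retraction (false). A rough sketch of that pattern against the registered table2 follows; the SQL text, the object name, and the print/execute calls are assumptions, not the original continuation.

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{Table, TableEnvironment}
import org.apache.flink.table.api.scala._

object Table_API_retract_sketch {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tableEnv = TableEnvironment.getTableEnvironment(env)

    val stream = env.fromElements(("aa", 192L), ("aa", 192L))
    tableEnv.registerDataStream("table2", stream, 'myString, 'myLong)

    // grouped aggregation: results are updated as new rows arrive, so an append stream is not enough
    val result: Table = tableEnv.sqlQuery(
      "SELECT myString, SUM(myLong) AS total FROM table2 GROUP BY myString")

    // true = insert/update, false = retraction of a previously emitted row
    val rs: DataStream[(Boolean, (String, Long))] = result.toRetractStream[(String, Long)]
    rs.print()

    env.execute("table api retract sketch")
  }
}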