Window操作
package com.atguigu.GTable_api_Flink_sql;
import com.atguigu.Zbeans.SensorReading;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Over;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.planner.expressions.UnboundedRange;
import org.apache.flink.types.Row;
/**
* 测试table api& flink sql中的group/over window
*/
public class CWindowTest {
    /**
     * Demonstrates group windows and over windows in both the Table API and Flink SQL.
     *
     * Reads sensor readings from a local text file, assigns event-time timestamps and
     * watermarks, registers the stream as a table, then runs four equivalent window
     * aggregations (Table API / SQL x group window / over window) and prints the results.
     */
    public static void main(String[] args) throws Exception {
        // Set up the streaming environment with the Blink planner, running on event time.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode().build();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings); // table execution environment

        // Read the file as a stream, parse each CSV line into a SensorReading, and
        // assign watermarks with a 2-second bounded out-of-orderness.
        DataStreamSource<String> inputStream = env.readTextFile("G:\\SoftwareInstall\\idea\\project\\UserBehaviorAnalysis\\BasicKnowledge\\src\\main\\resources\\sensor.txt");
        DataStream<SensorReading> mapStream = inputStream.map(line -> {
            String[] splits = line.split(",");
            // Use parse methods instead of the deprecated Long/Double boxing constructors,
            // and drop the redundant new String(...) copy of the id field.
            return new SensorReading(splits[0], Long.parseLong(splits[1]), Double.parseDouble(splits[2]));
        })
        .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<SensorReading>(Time.seconds(2)) {
            @Override
            public long extractTimestamp(SensorReading sen) {
                // SensorReading stores seconds; Flink expects epoch milliseconds.
                return sen.getTimestamp() * 1000L;
            }
        });

        // Convert the stream into a table and declare the time attribute.
        //tableEnv.fromDataStream(mapStream,"id,timestamp as ts,temperature as temp,pt.proctime");// append a processing-time field aliased pt
        Table dataTable = tableEnv.fromDataStream(mapStream, "id,timestamp as ts,temperature as temp,rt.rowtime");// append an event-time (rowtime) field aliased rt
        tableEnv.createTemporaryView("sensor", dataTable);// register as a view so the SQL API can use it

        /**
         * Group window operations in the Table API and Flink SQL.
         * The DataStream API groups first and then windows, whereas the Table API / Flink SQL
         * do the opposite: window first, then group — and the window field must appear in the
         * grouping fields. In Flink SQL, tumble_end / hop_end yield the window end time.
         */
        Table resultTable1 = dataTable.window(Tumble.over("10.seconds").on("rt").as("tw"))
                .groupBy("id,tw")
                .select("id,id.count as cou_id,temp.avg as avg_temp,tw.end");// tw.end gives the window end time
        Table resultTable2 = tableEnv.sqlQuery("select id,count(id) as cou_id,avg(temp) as avg_temp,tumble_end(rt,interval '10' second) "
                + " from sensor group by id,tumble(rt,interval '10' second)");// Flink SQL time units must be singular (second, not seconds)

        /**
         * Over window operations in the Table API and Flink SQL.
         * Processing time: pt. Unbounded over rows: UNBOUNDED_ROW; unbounded over range: UNBOUNDED_RANGE.
         * A bounded window is written as preceding("10.seconds") or preceding("10.rows").
         * partitionBy is optional; preceding is optional too — omitting it aggregates all preceding rows.
         */
        Table resultTable3 = dataTable.window(Over.partitionBy("id").orderBy("rt").preceding("2.rows").as("ow"))
                .select("id,rt,id.count over ow ,temp.avg over ow");
        Table resultTable4 = tableEnv.sqlQuery("select id,rt,count(id) over ow,avg(temp) over ow from " +
                " sensor window ow as(partition by id order by rt rows between 2 preceding and current row)");

        /**
         * Print the results.
         * Aggregations that are NOT windowed must be emitted with toRetractStream, because they
         * keep updating rows in the dynamic result table. Windowed aggregations only append a
         * result when the window closes, so toAppendStream or insertInto works here.
         */
        dataTable.printSchema(); // print the table schema
        tableEnv.toAppendStream(resultTable1, Row.class).print("Table api的group window操作");
        tableEnv.toAppendStream(resultTable2, Row.class).print("Flink sql的group window操作");
        tableEnv.toAppendStream(resultTable3, Row.class).print("Table api的over window操作");
        tableEnv.toAppendStream(resultTable4, Row.class).print("Flink sql的over window操作");

        // Launch the job.
        env.execute("测试Table api&Flink sql的window操作");
    }
}
时间操作
如何添加时间字段
DS转table时
table.fromDataStream(ds,"id,salary,pt.proctime") 最后一个字段,pt为处理时间字段别名。
table.fromDataStream(ds,"id,salary,rt.rowtime") 最后一个字段,rt为事件时间字段别名。
table.fromDataStream(ds,"id,timestamp.rowtime as rt") 将时间字段指定为事件时间,rt为别名。事件时间字段会变成以毫秒为单位。
定义schema时(可能都无法生效)
.field("pt",DataTypes.TIMESTAMP(3)).proctime()
可能会报错。TIMESTAMP(3)代表秒后有三位,即毫秒。
.rowtime(new Rowtime()
.timestampsFromField("timestamp").watermarksPeriodicBounded(1000))
无法生效。
创建源&目标表时
pt as PROCTIME() 最后一个字段,必须是blink 的计划器
rt as to_timestamp(from_unixtime(timestamp)),
watermark for rt as rt - interval '1' second 必须是blink 的计划器