简单案例应用
pom文件:
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-java</artifactId>
    <version>1.10.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-java_2.12</artifactId>
    <version>1.10.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-planner_2.12</artifactId>
    <version>1.10.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-planner-blink_2.12</artifactId>
    <version>1.10.1</version>
</dependency>
文件数据:SensorReading类
sensor_1,1547718199,35.8
sensor_1,1547718199,23.8
sensor_6,1547718201,15.4
sensor_7,1547718202,6.7
sensor_7,1547718202,3.7
sensor_10,1547718205,38.1
sensor_10,1543228205,32.1
sensor_10,1147248205,40.1
package com.yrl.table;
import com.yrl.bean.SensorReading;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
public class Test01 {
    /**
     * Reads "id,timestamp,temperature" records from a text file, turns the
     * resulting stream into a Table, and queries it twice — once with the
     * Table API and once with SQL — printing both result streams.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps the printed output in a deterministic order.
        env.setParallelism(1);

        // Source: one CSV record per line (renamed from the misleading
        // "socketTextStream" — this is a file source, not a socket).
        DataStreamSource<String> fileStream = env.readTextFile("D:\\大数据组件API\\Flink\\Flink01\\src\\main\\resources\\test.txt");

        // Parse each line into a SensorReading. Use parseLong/parseDouble
        // instead of the deprecated boxed constructors new Long(...)/new Double(...).
        DataStream<SensorReading> dataStream = fileStream.map(value -> {
            String[] fields = value.split(",");
            return new SensorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
        });

        // Create the table execution environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Create a Table object from the stream.
        Table table = tableEnv.fromDataStream(dataStream);

        // Query via the Table API.
        Table res = table.select("id,temperature").where("id='sensor_1'");

        // Register the table as a view so it can be referenced from SQL.
        tableEnv.createTemporaryView("sensor", table);

        // Equivalent query via SQL.
        String sql = "select id,temperature from sensor where id='sensor_1'";
        Table sqlQuery = tableEnv.sqlQuery(sql);

        // Convert both results to append streams and print them.
        tableEnv.toAppendStream(res, Row.class).print("table");
        tableEnv.toAppendStream(sqlQuery, Row.class).print("sql");

        env.execute();
    }
}
新老版本Table环境创建
package com.yrl.table;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.table.api.java.StreamTableEnvironment;
public class Test02 {
public static void main(String[] args) {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
//1.1基于老版本planner的流处理
EnvironmentSettings oldEnvSettings = EnvironmentSettings.newInstance().useOldPlanner()..inStreamingMode().build();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env,oldEnvSettings);
//1.2基于老版本planner的批处理
ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
BatchTableEnvironment batchTableEnv = BatchTableEnvironment.create(batchEnv);
//2.1基于新版本blink的流处理
EnvironmentSettings newEnvSettings = EnvironmentSettings.newInstance().useBlinkPlanner()..inStreamingMode().build();
StreamTableEnvironment tableEnv2 = StreamTableEnvironment.create(env,newEnvSettings);
//2.2基于新版本blink的批处理
EnvironmentSettings newEnvSettings2 = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build();
TableEnvironment batchTableEnv2 = TableEnvironment.create(newEnvSettings2);
}
}