package cn.edu.tju.demo2;
import cn.edu.tju.demo.Test29;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.*;
import org.apache.flink.table.sources.CsvTableSource;
import org.apache.flink.table.sources.TableSource;
import org.apache.flink.types.Row;
import java.util.Map;
public class Test34 {

    /**
     * Flink Table API demo: registers a local CSV file ("dataInfo2.txt") as a
     * temporary table via the legacy connector/descriptor API, runs a simple
     * projection query over it, and prints the result rows as an append-only
     * stream.
     *
     * @param args unused
     * @throws Exception propagated from {@code env.execute(...)}
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Blink planner in STREAMING mode. (Renamed from
        // "environmentSettingsBlinkBatch": inStreamingMode() is what is
        // actually configured, so the old name was misleading.)
        EnvironmentSettings blinkStreamingSettings =
                EnvironmentSettings.newInstance()
                        .useBlinkPlanner()
                        .inStreamingMode()
                        .build();
        StreamTableEnvironment tableEnv =
                StreamTableEnvironment.create(env, blinkStreamingSettings);

        // Register the CSV file as temporary table "data_info" with schema
        // (ts BIGINT, info STRING, val DOUBLE).
        // NOTE(review): the connect()/descriptor API is deprecated in Flink 1.13
        // and removed in 1.14 — prefer tableEnv.executeSql("CREATE TABLE ...")
        // with the 'filesystem' connector when upgrading.
        String filePath = "dataInfo2.txt";
        tableEnv.connect(new FileSystem().path(filePath))
                .withFormat(new Csv())
                .withSchema(new Schema()
                        .field("ts", DataTypes.BIGINT())
                        .field("info", DataTypes.STRING())
                        .field("val", DataTypes.DOUBLE()))
                .createTemporaryTable("data_info");

        // Simple projection over the registered table.
        String sql = "select ts,info,val from data_info";
        Table queryTable = tableEnv.sqlQuery(sql);
        queryTable.printSchema();
        System.out.println("########################################");

        // The query is insert-only, so an append stream is valid here.
        // NOTE(review): toAppendStream is deprecated in newer Flink versions in
        // favor of tableEnv.toDataStream(queryTable) — confirm target version.
        DataStream<Row> resultDataStream = tableEnv.toAppendStream(queryTable, Row.class);
        resultDataStream.print();

        env.execute("my job");
    }
}
/*
 * Sample contents of the input file (dataInfo2.txt), one CSV record per line
 * in the order ts,info,val:
 *
 * 1689997890000,hi world,22
 * 1689997990000,hello world,33
 */