Flink提供了一个CsvTableSource来读取csv文件,然后利用registerTableSource将其注册为一张表,我们就可以写SQL操作这张表了,非常方便。废话不多说,直接看下面的demo
package flink.table
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.table.api.{TableEnvironment, Types}
import org.apache.flink.table.sinks.CsvTableSink
import org.apache.flink.table.sources.CsvTableSource
/**
* Flink读取csv文件注册为表
*/
object flinkTable {
def main(args: Array[String]): Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tableEnv = TableEnvironment.getTableEnvironment(env)
// 设置表的字段;
val csv = CsvTableSource.builder()
.path("D:/xxx.csv")
.field("id",Types.INT)
.field("url",Types.STRING)
.field("p_name",Types.STRING)
.field("price",Types.DOUBLE)
.field("class_one",Types.INT)
.field("class_two",Types.INT)
.fie