package com.ws.jdbc
import org.apache.spark.sql.{DataFrame, SparkSession}
object CsvSource {
  /**
   * Entry point: reads a headerless CSV file with a local Spark session,
   * renames the default `_c0`/`_c1`/`_c2` columns to `id`/`age`/`score`,
   * and prints the schema plus the first 10 rows.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Fix: appName was "JsonSource" — a copy-paste leftover from a sibling
    // example; renamed to match this object.
    val spark = SparkSession.builder()
      .appName("CsvSource")
      .master("local[*]")
      .getOrCreate()
    try {
      // Read CSV data from a path; with no schema options every column is String.
      val data: DataFrame = spark.read.csv("E:\\bigData\\testdata\\move.csv")
      // Rename the auto-generated column names to meaningful field names.
      val dataFrame = data.toDF("id", "age", "score")
      val result = dataFrame.limit(10)
      result.printSchema()
      result.show()
    } finally {
      // Ensure the local Spark session is released even if the read fails.
      spark.stop()
    }
  }
}
// Spark SQL CSV data source example.
// (Originally published as a blog post; latest recommended article dated 2022-09-30 01:30:42.)