0. Utility class for obtaining a SparkSession
package com.bigdata.spark.sql
import org.apache.spark.sql.{
DataFrame, SparkSession}
/** Factory helper for local-mode [[SparkSession]] instances. */
object SparkSessionUtil {

  /** Builds (or reuses, via `getOrCreate`) a SparkSession running on all local cores.
    *
    * @param appName the application name shown in the Spark UI
    * @return the shared SparkSession for this JVM
    */
  def util(appName: String): SparkSession = {
    val builder = SparkSession.builder()
    builder.master("local[*]")
    builder.appName(appName)
    builder.getOrCreate()
  }
}
1. Loading a JSON file into a typed Dataset
package com.bigdata.spark.sql
import org.apache.spark.sql.{
DataFrame, Dataset, SparkSession}
/** Demo: reads user.json into a typed Dataset[User] and prints each user's first friend. */
object DataSourceDemo {

  /** Entry point.
    *
    * Fixes vs. original:
    *  - `user.friends(0)` threw ArrayIndexOutOfBoundsException (or NPE) for a
    *    user whose "friends" field is empty or absent; use a safe lookup instead.
    *  - The SparkSession was never stopped; wrap in try/finally so local
    *    resources are released even if the job fails.
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("Test")
      .getOrCreate()
    try {
      import spark.implicits._
      // NOTE(review): path is relative to the working directory — assumes the
      // app is launched from the project root so target/classes/user.json exists.
      val df: DataFrame = spark.read.json("target/classes/user.json")
      val ds: Dataset[User] = df.as[User]
      // Option(...) guards a null array (field missing in JSON); headOption
      // guards an empty one. Runs on executors, so output appears in their logs.
      ds.foreach { user =>
        val firstFriend = Option(user.friends).flatMap(_.headOption)
        println(firstFriend.getOrElse("<no friends>"))
      }
    } finally {
      spark.stop()
    }
  }
}
// Record schema for user.json: one user per JSON line, with a name, an age,
// and an array of friend names (presumably may be empty — TODO confirm with data).
case class User(name:String, age: Long, friends: Array[String])
{"name": "lisi", "age": 20, "friends": ["lisi", "zs"]}
{
"name": "zs",