import org.apache.spark.sql.SparkSession
object DF2DS1 {

  /** Typed schema for the records in the employee JSON file. Field names must
    * match the JSON keys for `.as[Employee]` to resolve columns.
    */
  case class Employee(name: String, age: Long, depId: Long, gender: String, salary: Long)

  /** Demonstrates round-tripping between an untyped DataFrame and a typed
    * Dataset[Employee] on a local Spark session.
    *
    * @param args optional first argument: path to the employee JSON file;
    *             defaults to the original hard-coded "d://employee.json".
    */
  def main(args: Array[String]): Unit = {
    println("astron")

    val spark = SparkSession
      .builder()
      .master("local")
      .appName("star")
      .getOrCreate()

    // Brings the implicit Encoder[Employee] into scope for .as[Employee].
    import spark.implicits._

    // Allow the input path to be supplied on the command line; keep the
    // original Windows path as the default for backward compatibility.
    val inputPath = args.headOption.getOrElse("d://employee.json")
    val demoDf = spark.read.json(inputPath)
    println(demoDf)

    // Untyped DataFrame -> typed Dataset[Employee].
    val demoDs = demoDf.as[Employee]
    println(demoDs)

    // Typed Dataset -> back to an untyped DataFrame.
    val newDemoDf = demoDs.toDF()
    println(newDemoDf)

    // Release local Spark resources before the JVM exits.
    spark.stop()
  }
}

/* Sample console output (was pasted into the file as bare text, which broke
 * compilation; preserved here as a comment for reference):
 *
 * astron
 * [age: bigint, depId: bigint ... 3 more fields]
 * [age: bigint, depId: bigint ... 3 more fields]
 * [age: bigint, depId: bigint ... 3 more fields]
 * root
 *  |-- age: long (nullable = true)
 *  |-- depId: long (nullable = true)
 *  |-- gender: string (nullable = true)
 *  |-- name: string (nullable = true)
 *  |-- salary: long (nullable = true)
 */