Big Data Series - Spark SQL: SQL and DSL usage, and RDD <-> DataFrame <-> Dataset conversions
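The example below reads data/user.json. The file itself isn't shown here, but since the code selects the userName and age columns, a minimal input in the JSON Lines format that spark.read.json expects (one object per line) might look like this; the exact contents and values are an assumption:

{"userName": "user1", "age": 20}
{"userName": "user2", "age": 25}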
package com.test

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

// Demonstrates SQL vs. the DSL, and conversions between RDD <-> DataFrame <-> Dataset
object SparkSql {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("SparkSqlPractice").setMaster("local[*]")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    // Needed for the implicit conversions behind toDF/toDS and the $"col" syntax
    import spark.implicits._

    // DataFrame
    val df = spark.read.json("data/user.json")
    df.show()
    df.createOrReplaceTempView("user")

    // SQL
    spark.sql("SELECT * FROM user").show()

    // DSL
    df.select("userName").show()
    df.select($"age" + 1).show()
    // Alternatively: df.select('age + 1).show()
    // (the 'age symbol syntax also works, but is deprecated in Spark 3.x)

    // Dataset
    val ds = Seq(1, 5, 6).toDS()
    ds.show()

    // RDD <-> DataFrame
    val rdd1 = spark.sparkContext.makeRDD(List((1, "user1", 1), (2, "user1", 2)))
    val df1 = rdd1.toDF("id", "name", "age")
    df1.rdd // back to an RDD; note this yields an untyped RDD[Row]

    // DataFrame <-> Dataset
    val ds1 = df1.as[User]
    val df2 = ds1.toDF()

    // RDD <-> Dataset
    val ds2 = rdd1.map {
      case (id, name, age) => User(id, name, age)
    }.toDS()
    ds2.rdd // back to an RDD; this time a typed RDD[User]

    spark.close()
  }

  case class User(id: Long, name: String, age: Int)
}
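Note that in Scala a DataFrame is just an alias for Dataset[Row], which is why df1.rdd above comes back as RDD[Row] while ds2.rdd is the typed RDD[User]. To compile and run this locally, the spark-sql artifact is the only dependency you need; a minimal sbt line would be the following, where the version number is an assumption and should match your Spark installation:

// build.sbt (sketch; pick a Spark version matching your environment)
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.5.1"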