首先导入 Maven 依赖：
```xml
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.11</artifactId>
    <version>2.1.1</version>
</dependency>
```
DataFrame
packagesparksqlimportorg.apache.spark.rdd.RDDimportorg.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
// Demo: build a Spark SQL DataFrame from an RDD of (name, age) tuples and
// register it as a temporary view.
//
// NOTE(review): whitespace was stripped from this paste — `case classUser`,
// `importspark.implicits._`, `Unit={`, `Int)=` etc. will not compile until the
// missing spaces are restored. Also `SparkConf` is used below but does not
// appear in the visible import list (only spark.rdd.RDD and spark.sql types
// are imported) — confirm `import org.apache.spark.SparkConf` exists upstream.
// The app name string "sparlsql" looks like a typo for "sparksql" — flagged
// only, since it is a runtime string.
object Demo1 {//create a case class modelling one row: (name, age)
case classUser(name:String,age:Int)
def main(args: Array[String]): Unit={
//create the SparkConf and SparkSession (local mode, all cores)
val conf = new SparkConf().setAppName("sparlsql").setMaster("local[*]")
val spark = SparkSession.builder().config(conf).getOrCreate()
//implicit conversions — required for the RDD.toDF syntax below
importspark.implicits._
// build a small in-memory RDD of (name, age) pairs as sample data
val raw: RDD[(String, Int)]= spark.sparkContext.makeRDD(List(("zhangsan", 21), ("lisi", 22), ("wangwu", 23)))
// convert the RDD to a DataFrame, naming the columns "name" and "age"
val df: DataFrame= raw.toDF("name", "age")
df.show()//register the DataFrame as a temp view (truncated below — view name not visible)
df.createOrReplaceTempView("