package cn.tedu.sql
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
object Driver {
  /**
   * Demo driver: builds a small in-memory RDD of (id, name, age) rows,
   * converts it to a DataFrame via SQLContext implicits, runs a SQL filter
   * (age > 20), and saves the matching rows as text files.
   *
   * NOTE: the output directory must not already exist, or
   * saveAsTextFile will fail with "output directory already exists".
   */
  def main(args: Array[String]): Unit = {
    // BUG FIX: the original used typographic quotes (“local”/“sql”),
    // which are not valid Scala string delimiters and do not compile.
    val conf = new SparkConf().setMaster("local").setAppName("sql")
    val sc = new SparkContext(conf)
    // SQLContext bridges RDDs and DataFrames and provides SQL-based table operations.
    val sqc = new SQLContext(sc)
    val r1 = sc.makeRDD(List((1, "tom", 23), (2, "rose", 18), (3, "jim", 30), (4, "jary", 20)))
    // Implicit RDD -> DataFrame conversion (replaces an explicit createDataFrame call).
    import sqc.implicits._
    val df = r1.toDF("id", "name", "age")
    // Register the DataFrame as a temp table so it can be queried with SQL.
    // (registerTempTable is the Spark 1.x API, consistent with SQLContext usage here.)
    df.registerTempTable("tb1")
    val result = sqc.sql("select * from tb1 where age>20")
    // Persist the query result; use the underlying Scala RDD directly instead of
    // detouring through toJavaRDD — the written output is identical.
    result.rdd.saveAsTextFile("c://data/sql-result")
    // Release cluster resources before the JVM exits.
    sc.stop()
  }
}