// Spark 1.x setup: SparkConf + SparkContext, plus a SQLContext for DataFrame work.
// Runs locally in a single JVM ("local" master) under the app name "df".
val conf = new SparkConf().setAppName("df").setMaster("local")
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)

// Create a plain RDD of lines from a whitespace-delimited text file.
// NOTE(review): hard-coded local Windows path — parameterize before running elsewhere.
val rdd = sc.textFile("G:\\qf大数据\\spark\\day06_sql\\students.txt")

// Parse each line into a Student(id, name, age) — fields split on a single space.
// NOTE(review): sp(0)/sp(2).toInt will throw on blank or malformed lines; assumes
// the input file is clean and space-separated with exactly 3 fields — confirm.
val student: RDD[Student] = rdd.map(x => {
  val sp = x.split(" ")
  Student(sp(0).toInt, sp(1), sp(2).toInt)
})

// Implicit conversions that enable RDD[case class] -> DataFrame (.toDF).
import sqlContext.implicits._

// NOTE(review): the source was truncated mid-statement here ("val df: Dat…");
// reconstructed as the canonical RDD-to-DataFrame conversion that the implicits
// import above exists to enable — confirm against the original file.
val df: DataFrame = student.toDF()