toDF requires the implicits import

val spark = SparkSession.builder().appName("ch2homework1").master("local[4]").getOrCreate()
import spark.implicits._ // without this implicit import, toDF is not available
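For reference, a minimal self-contained sketch (the ToDFDemo object and the Seq data are made up for illustration) showing that toDF only resolves once spark.implicits._ is in scope:

import org.apache.spark.sql.SparkSession

object ToDFDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("toDFDemo").master("local[4]").getOrCreate()
    import spark.implicits._ // brings the Seq/RDD-to-DataFrame conversions into scope

    // toDF is added by an implicit conversion from spark.implicits._;
    // without the import above, this line does not compile
    val df = Seq(("normal.", 1), ("smurf.", 1)).toDF("name", "count")
    df.show()
    spark.stop()
  }
}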
The code as compiled:
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

object homework01 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ch2homework01").setMaster("local[4]")
    val sc = new SparkContext(conf)
    val spark = SparkSession.builder().appName("ch2homework1").master("local[4]").getOrCreate()
    case class names(name: String, count: Int) // defined inside main -- this is the problem
    import spark.implicits._ // without this implicit import, toDF is not available
    // pair the last comma-separated field of each line (the connection label) with 1
    val data = sc.textFile("file:///E://FTP//spark//2-sparkCore1//kddcup.data.gz")
      .flatMap(_.split("\n"))
      .map(line => names(line.split(",").reverse(0), 1))
      .toDF()
    data.show()
  }
}
Compiling this fails with:

Error:(31, 46) value toDF is not a member of org.apache.spark.rdd.RDD[names]
Solution:
Move case class names(name: String, count: Int) out of the method body and into the enclosing object. Spark derives the Encoder that backs toDF at compile time, and it cannot do so for a case class that is local to a method; since no encoder is found, the implicit conversion that adds toDF to RDD[names] never applies, hence the error above. Defining the case class as a member of the object (or at the top level) fixes it.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

object homework01 {
  case class names(name: String, count: Int) // object member, so an Encoder can be derived
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ch2homework01").setMaster("local[4]")
    val sc = new SparkContext(conf)
    val spark = SparkSession.builder().appName("ch2homework1").master("local[4]").getOrCreate()
    import spark.implicits._
    val data = sc.textFile("file:///E://FTP//spark//2-sparkCore1//kddcup.data.gz")
      .flatMap(_.split("\n"))
      .map(line => names(line.split(",").reverse(0), 1))
      .toDF()
    data.show()
  }
}
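Since every row pairs a label with 1, a natural follow-up (an assumption about where the homework goes next, not stated above) is to count records per label; the DataFrame API can do this directly, without needing the literal count column:

val labelCounts = data.groupBy("name").count() // rows per distinct label
labelCounts.show()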