// Spark setup: run locally on all cores under the app name "sort".
val conf = new SparkConf().setAppName("sort").setMaster("local[*]")
val sc = new SparkContext(conf)
// NOTE(review): "sprak" is a typo for "spark"; kept as-is because later lines reference it.
val sprak = SparkSession.builder().config(conf).getOrCreate()
// Bug fix: this import was fused onto the end of the previous statement, which does not compile.
// It must be a separate line, and must come after `sprak` is defined (it enables .toDS/.toDF).
import sprak.implicits._
// Plain text file.
// NOTE(review): the path mixes "\\" and "/" separators — works on Windows but is inconsistent
// with the forward-slash path used for the JSON file below.
val file= sc.textFile("E:\\33\\88/02.txt")
// JSON file (read through the SparkSession as a DataFrame).
val f = sprak.read.json("E:/33/88/03.txt")
// Spark: build a DataFrame and register it as a table (temp view).
// (This line was raw prose pasted into the code; converted to a comment so the file parses.)
// JSON format: load the file as a DataFrame and register it as temp view "jj".
// NOTE(review): createOrReplaceTempView returns Unit, so `f` here is Unit — and it also
// redefines the `f` val declared above; consider renaming one of them.
val f = sprak.read.json("E:/33/88/03.txt").toDF().createOrReplaceTempView("jj")
// Plain-text format.
// NOTE(review): `file` redefines the val of the same name declared above.
val file = sc.textFile("E:\\33\\88/02.txt")
// Split each line ONCE into its 7 space-separated columns.
// (Original code called x.split(" ") seven times per line — same result, 7x the work.)
// Assumes every line has at least 7 fields — TODO confirm against the input file.
val tmp = file.map { line =>
  val c = line.split(" ")
  (c(0), c(1), c(2), c(3), c(4), c(5), c(6))
}
// Map the tuples onto the ClassScore case class (declared elsewhere in the project);
// columns 3..7 are expected to be integers — toInt will throw on malformed input.
val s = tmp.map(x => ClassScore(x._1, x._2, x._3.toInt, x._4.toInt, x._5.toInt, x._6.toInt, x._7.toInt)).toDS
// Register the Dataset as temp view "t_Gread" (`view` is Unit; kept for source compatibility).
val view = s.createOrReplaceTempView("t_Gread")
// Section title: "Spark reads plain text files and JSON files".
// (The original line was a garbled, truncated duplicate paste of the snippet above; removed.)