package TeacherTopN2

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

import scala.collection.mutable

/**
 * Spark job: for each subject, find the top-2 most popular teachers by click count.
 *
 * Input: a log file of URLs of the form `http://subject.example.cn/teacher`,
 * where path segment 2 is `subject.domain` and segment 3 is the teacher name.
 * Output: `(subject, teacher, count)` triples printed to stdout.
 */
object TeacherN06 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Teacher").setMaster("local[*]")
    val sc = new SparkContext(conf)

    val lines = sc.textFile("F:\\spark\\计算每个学科最受欢迎Top2.log")

    // Parse each URL: segment 3 is the teacher, segment 2 is "subject.domain"
    // of which we keep only the subject part before the first dot.
    val subjectTeacherOne: RDD[((String, String), Int)] = lines.map { url =>
      val parts = url.split("/")
      val teacher = parts(3)
      val subject = parts(2).split("[.]")(0)
      ((subject, teacher), 1)
    }

    // Total clicks per (subject, teacher).
    val counts: RDD[((String, String), Int)] = subjectTeacherOne.reduceByKey(_ + _)

    // Group by subject, then keep only the top 2 records per subject using a
    // bounded TreeSet so each group needs only O(2) memory regardless of size.
    val top2PerSubject: RDD[(String, ((String, String), Int))] =
      counts.groupBy(_._1._1).flatMapValues { records =>
        // BUG FIX: the original ordering compared on the count alone, so two
        // teachers with equal counts were "equal" to the TreeSet and one of
        // them was silently discarded. Break ties on the teacher name so
        // distinct teachers are always kept as distinct elements.
        implicit val ord: Ordering[((String, String), Int)] =
          Ordering.by { case ((_, teacher), cnt) => (-cnt, teacher) }

        val top = mutable.TreeSet.empty[((String, String), Int)]
        records.foreach { rec => // foreach, not map: executed only for its side effect
          top += rec
          if (top.size > 2) {
            top -= top.last // `last` is the lowest count under this ordering
          }
        }
        top.iterator
      }

    // Flatten to (subject, teacher, count) for output.
    val result: RDD[(String, String, Int)] =
      top2PerSubject.map { case (_, ((subject, teacher), cnt)) => (subject, teacher, cnt) }

    result.collect().foreach(println)

    sc.stop() // release the SparkContext (the original never stopped it)
  }
}