package com.rdd.topn
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object HotCategoryTop10Analysis3 {
  // TODO: Top-10 hot categories
  //
  // Reads user_visit_action.txt (underscore-delimited fields) and ranks
  // categories by (clickCount, orderCount, payCount) using tuple ordering,
  // printing the top 10.
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("HotCategoryTop10Analysis")
    val sc = new SparkContext(conf)

    val actionRDD: RDD[String] = sc.textFile("in/user_visit_action.txt")

    // Reshape each action line into (categoryId, (click, order, pay)) so a
    // single reduceByKey can aggregate all three metrics at once:
    //   click case: (categoryId, (1, 0, 0))   — field 6 holds the clicked category
    //   order case: (categoryId, (0, 1, 0))   — field 8 holds comma-separated category ids
    //   pay   case: (categoryId, (0, 0, 1))   — field 10 holds comma-separated category ids
    val flatRDD: RDD[(String, (Int, Int, Int))] = actionRDD.flatMap {
      action => {
        val datas = action.split("_")
        if (datas(6) != "-1") {
          // Click action: field 6 is "-1" when the line is not a click.
          List((datas(6), (1, 0, 0)))
        }
        else if (datas(8) != "null") {
          // Order action: one record per category in the order.
          val ids = datas(8).split(",")
          // .toList keeps every branch of the if/else the same collection
          // type; mixing Array and List here would widen the inferred type
          // and break flatMap.
          ids.map(id => (id, (0, 1, 0))).toList
        }
        else if (datas(10) != "null") {
          // Pay action: one record per category paid for.
          val ids = datas(10).split(",")
          ids.map(id => (id, (0, 0, 1))).toList
        }
        else {
          Nil
        }
      }
    }

    // Aggregate per category: element-wise sum of (click, order, pay).
    val analysisRDD: RDD[(String, (Int, Int, Int))] = flatRDD.reduceByKey((t1, t2) => {
      (t1._1 + t2._1,
       t1._2 + t2._2, // was t2._2 + t2._2: doubled one side and lost the other
       t1._3 + t2._3)
    })

    // Sort descending by the metric tuple (click first, then order, then pay)
    // and keep the top 10.
    val resultRDD = analysisRDD.sortBy(_._2, false).take(10)
    resultRDD.foreach(println)

    sc.stop()
  }
}