import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object Top3 {
  /**
   * Computes the top-3 advertisements by click count for each province.
   *
   * Reads space-separated click records from "data/agent.log", where field 1
   * is the province id and field 4 is the ad id (0-based split indices —
   * assumed from the original code; confirm against the actual log format).
   * Prints one line per province: (province, List((ad, clicks), ...)).
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("Top3")
    val sc: SparkContext = new SparkContext(conf)
    try {
      val lines: RDD[String] = sc.textFile("data/agent.log")

      // Emit ((province, ad), 1) for every click record.
      val provinceAdOnes: RDD[((String, String), Int)] = lines.map { line =>
        val fields: Array[String] = line.split(" ")
        ((fields(1), fields(4)), 1)
      }

      // Total clicks per (province, ad) pair.
      val provinceAdCounts: RDD[((String, String), Int)] =
        provinceAdOnes.reduceByKey(_ + _)

      // Re-key by province and gather all (ad, count) pairs per province.
      val byProvince: RDD[(String, Iterable[(String, Int)])] =
        provinceAdCounts
          .map { case ((province, ad), count) => (province, (ad, count)) }
          .groupByKey()

      // Keep the 3 ads with the highest click counts in each province.
      val top3: RDD[(String, List[(String, Int)])] =
        byProvince.mapValues(_.toList.sortBy(_._2)(Ordering.Int.reverse).take(3))

      // collect() first so the results print on the driver; foreach(println)
      // directly on the RDD would print on executors and be lost in cluster mode.
      top3.collect().foreach(println)
    } finally {
      // Release the SparkContext even if the job throws — the original
      // leaked it by never calling stop().
      sc.stop()
    }
  }
}
// Spark 练习题-每个省广告点击Top3 (Spark exercise: top-3 ad clicks per province)
// 最新推荐文章于 2024-10-07 22:52:43 发布 (blog-scrape metadata; not part of the program)