--------------------TopK -- direct descending sort, no partitioning; use when the data volume is small------------------------------------------------
Method 1:
package sparkStreaming.pag

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Created by zyf on 2016/9/29.
 */
object top {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("top").setMaster("local")
    val sc = new SparkContext(conf)
    val count = sc.textFile("D:\\data\\SogouQ")
    // Count each tab-separated field, then swap to (count, word) so that
    // sortByKey can order by frequency, descending, across 10 partitions.
    val result = count.flatMap(line => line.split("\t")).map(word => (word, 1)).reduceByKey(_ + _)
    val sorted = result.map { case (key, value) => (value, key) }.sortByKey(false, 10)
    // coalesce(1) without a shuffle concatenates the sorted partitions in order;
    // coalesce(1, true) would reshuffle the data and destroy the sort order.
    sorted.coalesce(1).saveAsTextFile("C:\\result\\5")
    sc.stop()
  }
}
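
Spark also ships a shortcut for exactly this case: RDD.takeOrdered(k)(ord) returns the k smallest elements under ord straight to the driver, skipping the full sort and the single-file write. A minimal sketch under the same input assumptions as Method 1 (the object name topTakeOrdered is mine, not from the original):

package sparkStreaming.pag

import org.apache.spark.{SparkConf, SparkContext}

object topTakeOrdered {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("topTakeOrdered").setMaster("local")
    val sc = new SparkContext(conf)
    val counts = sc.textFile("D:\\data\\SogouQ")
      .flatMap(_.split("\t"))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
    // Ordering by negated count makes the "smallest" elements the most
    // frequent words, so takeOrdered(10) yields the top 10 by frequency.
    val top10 = counts.takeOrdered(10)(Ordering.by((p: (String, Int)) => -p._2))
    top10.foreach { case (word, cnt) => println(word + ": " + cnt) }
    sc.stop()
  }
}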
----------TopK-----------------------------------------------------------------------------------------------------
Method 2:
Compute the top ten entries by count.
package sparkStreaming.pag

import org.apache.spark.{SparkContext, SparkConf}

object TopK {
  def main(args: Array[String]) {
    val conf = new SparkConf().setMaster("local").setAppName("app name")
    val sc = new SparkContext(conf)
    val textRDD = sc.textFile("D:\\data\\SogouQ\\20060802\\part-00000")
    val count = textRDD.flatMap(line => line.split(",")).map(word => (word, 1)).reduceByKey(_ + _)
    // textRDD.flatMap(line => line.split("\t")).foreach(println)

    // Take the top 10 of each partition in parallel, then run getTopk once more
    // on the driver to merge the per-partition winners into the global top 10.
    val topk = count.mapPartitions(getTopk).collect()
    val iter = topk.iterator
    val outiter = getTopk(iter)
    println("TopK")
    while (outiter.hasNext) {
      val tmp = outiter.next()
      println("\n" + tmp._1 + ": " + tmp._2)
    }
    sc.stop()
  }

  // Keeps a fixed-size array of the 10 highest-count pairs seen so far,
  // inserting each new pair at its sorted position (insertion sort on count).
  def getTopk(iter: Iterator[(String, Int)]): Iterator[(String, Int)] = {
    val a = new Array[(String, Int)](10)
    while (iter.hasNext) {
      val tmp = iter.next()
      var flag = true
      for (i <- 0 until a.length if flag) {
        if (a(i) != null && tmp._2 > a(i)._2) {
          // Shift lower-ranked entries down one slot to make room at position i.
          for (j <- ((i + 1) until a.length).reverse) { a(j) = a(j - 1) }
          a(i) = tmp
          flag = false
        } else if (a(i) == null) {
          a(i) = tmp
          flag = false
        }
      }
    }
    // Filter out empty slots: a partition with fewer than 10 distinct words
    // would otherwise emit nulls and crash the merge pass and the print loop.
    a.iterator.filter(_ != null)
  }
}
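
The manual array shifting in getTopk can also be written as a bounded min-heap, the usual structure for a running top-K: the smallest of the current top k sits at the heap head, so each new element needs only one comparison before it is accepted or rejected. A sketch, not from the original code (topKHeap and k are my names), using scala.collection.mutable.PriorityQueue:

import scala.collection.mutable.PriorityQueue

def topKHeap(iter: Iterator[(String, Int)], k: Int = 10): Iterator[(String, Int)] = {
  // Ordering on negated count turns Scala's max-heap into a min-heap on count,
  // so dequeue() always evicts the smallest of the current top k.
  val heap = PriorityQueue.empty[(String, Int)](Ordering.by((p: (String, Int)) => -p._2))
  for (pair <- iter) {
    heap.enqueue(pair)
    if (heap.size > k) heap.dequeue() // evict the current smallest count
  }
  // dequeueAll drains in ascending count order; reverse it for descending.
  heap.dequeueAll.reverse.iterator
}

It wires in the same way as getTopk: count.mapPartitions(it => topKHeap(it, 10)) per partition, then one more topKHeap pass over the collected winners on the driver.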
--------------------------------------------------------------------------------------------