import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by liupeng on 2017/6/16.
*/
/**
 * Demonstrates RDD.takeOrdered: returns the first n elements of the RDD
 * according to the implicit ordering (here, the natural String ordering).
 *
 * Created by liupeng on 2017/6/16.
 */
object A_takeOrdered {
  // Needed on Windows so Hadoop libraries can locate winutils.exe.
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("takeOrdered_test").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Prepare some sample data.
      val nameList: List[String] = List("c", "d", "a", "e", "f", "k")
      val numbers = sc.parallelize(nameList)
      // takeOrdered(n) returns the first n elements sorted by the built-in
      // (or a user-supplied) ordering — for Strings, ascending lexicographic.
      val num = numbers.takeOrdered(3)
      num.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails;
      // the original code leaked it on any exception.
      sc.stop()
    }
  }
}
/*
Run results:
a
c
d
*/