package com.latrobe.spark

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Counts the number of occurrences of each distinct element in a collection
 * using `RDD.countByValue`, which returns the result as a local `Map[T, Long]`.
 *
 * Originally created 15-1-18. (Original comment translated: "count the
 * occurrences of each element in the collection".)
 */
object CountByValue {

  def main(args: Array[String]): Unit = {
    // Local single-JVM master — this is a self-contained demo, not cluster code.
    val conf = new SparkConf().setAppName("spark-demo").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val xx = sc.parallelize(List(1, 1, 1, 1, 2, 2, 3, 6, 5, 9))
      // Expected output: Map(2 -> 2, 5 -> 1, 1 -> 4, 9 -> 1, 3 -> 1, 6 -> 1)
      println(xx.countByValue())
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
// Example: Spark RDD countByValue
// Latest recommended article published 2023-05-24 17:17:52