countByKey
将相同key值的数据进行计数
以RDD{(1,2),(2,4),(2,5),(3,4),(3,5),(3,6)}为例,rdd.countByKey()会在driver端返回一个Map:{(1,1),(2,2),(3,3)}(即每个key出现的次数)
scala版本
// countByKey: count how many elements share each key in a pair RDD;
// the result is collected back to the driver as a Map[K, Long].
val sparkConf = new SparkConf().setMaster("local[1]").setAppName("CountByKey")
val sparkContext = new SparkContext(sparkConf)
// Sample pair data: keys 1, 2, 3 appear 1, 2 and 3 times respectively.
val pairs = sparkContext.parallelize(Array((1, 2), (2, 4), (2, 5), (3, 4), (3, 5), (3, 6)))
val keyCounts = pairs.countByKey()
keyCounts.foreach(println)
java版本
// countByKey: count how many elements share each key; returns a local Map<K, Long> on the driver.
SparkConf conf = new SparkConf().setMaster("local[1]").setAppName("CountByKey");
JavaSparkContext sc = new JavaSparkContext(conf);
// Build an RDD of (key, value) tuples.
JavaRDD<Tuple2<Integer, Integer>> tupleRDD = sc.parallelize(Arrays.asList(
        new Tuple2<>(1, 2),
        new Tuple2<>(2, 4),  // fixed typo: original read "new Tup;e2<>(2,4)" and did not compile
        new Tuple2<>(2, 5),
        new Tuple2<>(3, 4),
        new Tuple2<>(3, 5),
        new Tuple2<>(3, 6)
));
// Wrap the tuple RDD as a JavaPairRDD so pair operations (countByKey) are available.
JavaPairRDD<Integer, Integer> mapRDD = JavaPairRDD.fromJavaRDD(tupleRDD);
Map<Integer, Long> countByKeyRDD = mapRDD.countByKey();
for (Integer i : countByKeyRDD.keySet()) {
    System.out.println("key:\t" + i + "\tvalue:\t" + countByKeyRDD.get(i));
}
collectAsMap
将pair类型(键值对类型)的RDD转换成map
scala版本
// collectAsMap: bring a pair RDD back to the driver as a Map and print each entry.
val sparkConf = new SparkConf().setMaster("local[1]").setAppName("CollectAsMap")
val sparkContext = new SparkContext(sparkConf)
// Sample pair data (note the duplicate keys 2 and 3).
val pairs = sparkContext.parallelize(Array((1, 2), (2, 4), (2, 1), (3, 4), (3, 5), (3, 6)))
pairs.collectAsMap().foreach(println)
java版本
// collectAsMap: convert a pair RDD into a driver-side Map and print its entries.
// Fixed: original had "new SparkCOnf()" (typo) and an unterminated string literal "CollectAsMap); .
SparkConf conf = new SparkConf().setMaster("local[1]").setAppName("CollectAsMap");
JavaSparkContext sc = new JavaSparkContext(conf);
// Build an RDD of (key, value) tuples.
JavaRDD<Tuple2<Integer, Integer>> tupleRDD = sc.parallelize(Arrays.asList(
        new Tuple2<>(1, 2),
        new Tuple2<>(2, 4),
        new Tuple2<>(2, 5),
        new Tuple2<>(3, 4),
        new Tuple2<>(3, 5),
        new Tuple2<>(3, 6)
));
// Identity mapToPair: each Tuple2 already is a (key, value) pair.
JavaPairRDD<Integer, Integer> pairRdd = tupleRDD.mapToPair(new PairFunction<Tuple2<Integer, Integer>, Integer, Integer>() {
    @Override
    public Tuple2<Integer, Integer> call(Tuple2<Integer, Integer> it) throws Exception {
        return it;
    }
});
// Fixed: original referenced "pairRDD" here but declared "pairRdd" above (case mismatch, did not compile).
Map<Integer, Integer> collectMap = pairRdd.collectAsMap();
for (Integer i : collectMap.keySet()) {
    System.out.println("(" + i + "," + collectMap.get(i) + ")");
}