Spark action: countByKey

java

 1 public class CountByKeyDemo {
 2     private static SparkConf conf = new SparkConf().setMaster("local").setAppName("countbykeydemo");
 3     private static JavaSparkContext jsc = new JavaSparkContext(conf);
 4     public static void main(String[] args) {
 5         List<Tuple2<String,Integer>> list = Arrays.asList(
 6                                             new Tuple2<String,Integer>("tele",100),
 7                                             new Tuple2<String,Integer>("tele",200),
 8                                             new Tuple2<String,Integer>("tele",300),
 9                                             new Tuple2<String,Integer>("yeye",50),
10                                             new Tuple2<String,Integer>("yeye",10),
11                                             new Tuple2<String,Integer>("yeye",70),
12                                             new Tuple2<String,Integer>("wyc",10000)
13                                              );
14         
15         JavaPairRDD<String, Integer> rdd = jsc.parallelizePairs(list);
16         
17         Map<String, Long> map = rdd.countByKey();
18         map.entrySet().forEach(i-> System.out.println(i.getKey() + ":" + i.getValue()));
19         
20         jsc.close();
21     }
22 }

scala

 1 object CountByKeyDemo {
 2     def main(args: Array[String]): Unit = {
 3     val conf = new SparkConf().setMaster("local").setAppName("countdemo");
 4     val sc = new SparkContext(conf);
 5     
 6     val arr = Array(("class1","tele"),("class1","yeye"),("class2","wyc"));
 7     val rdd = sc.parallelize(arr,1);
 8     
 9     val result = rdd.countByKey();
10     for((k,v) <- result) {
11       println(k + ":" + v);
12     }
13   }
14 }

 

转载于:https://www.cnblogs.com/tele-share/p/10269098.html

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值