Big Data - Spark RDD Operators (10): PairRDD Action Operators countByKey and collectAsMap

countByKey

def countByKey(): Map[K, Long]
  • Counts how many times each key occurs

Scala version

package nj.zb.sparkstu

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object countByKey {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("countByKeyScala")
    val sc: SparkContext = new SparkContext(conf)
    val rdd1: RDD[(Int, Int)] = sc.parallelize(List((1,1),(2,1),(2,2),(3,1),(3,2),(3,3)))
    val rdd2: collection.Map[Int, Long] = rdd1.countByKey()
    rdd2.foreach(println)
  }
}

Output: (screenshot omitted)

Java version

package nj.zb.sparkstu;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Map;
import java.util.Set;

public class countByKeyJava {
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("countByKeyJava");
		JavaSparkContext sc = new JavaSparkContext(conf);
		JavaRDD<Tuple2<Integer, Integer>> rdd1 = sc.parallelize(Arrays.asList(
				new Tuple2<Integer, Integer>(1, 1),
				new Tuple2<Integer, Integer>(2, 1),
				new Tuple2<Integer, Integer>(2, 2),
				new Tuple2<Integer, Integer>(3, 1),
				new Tuple2<Integer, Integer>(3, 2),
				new Tuple2<Integer, Integer>(3, 3)));
		JavaPairRDD<Integer, Integer> rdd2 = JavaPairRDD.fromJavaRDD(rdd1);
		Map<Integer, Long> rdd3 = rdd2.countByKey();
		Set<Integer> rdd4 = rdd3.keySet();
		for (Integer a : rdd4) {
			System.out.println(a+"->"+rdd3.get(a));
		}
	}
}

Output: (screenshot omitted)
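
A caveat worth adding here (not part of the original post): countByKey is an action, so the entire count map is brought back to the driver. That is fine for a handful of keys, but with a very large number of distinct keys the usual alternative is a distributed aggregation built from mapValues and reduceByKey. Below is a minimal Java sketch of that equivalent; the class name, lambda style, and sample data are assumptions of this sketch, not taken from the original code.

package nj.zb.sparkstu;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

public class CountByKeyViaReduceByKeyJava {
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("countByKeyViaReduceByKeyJava");
		JavaSparkContext sc = new JavaSparkContext(conf);
		// Same sample pairs as in the examples above
		JavaPairRDD<Integer, Integer> pairs = sc
				.parallelize(Arrays.asList(
						new Tuple2<>(1, 1), new Tuple2<>(2, 1), new Tuple2<>(2, 2),
						new Tuple2<>(3, 1), new Tuple2<>(3, 2), new Tuple2<>(3, 3)))
				.mapToPair(t -> t);
		// Distributed equivalent of countByKey(): replace every value with 1L, then sum per key.
		// The counts stay in an RDD, so they never have to fit into driver memory all at once.
		JavaPairRDD<Integer, Long> ones = pairs.mapValues(v -> 1L);
		JavaPairRDD<Integer, Long> counts = ones.reduceByKey((a, b) -> a + b);
		counts.collect().forEach(System.out::println); // (1,1), (2,2), (3,3), order may vary
		sc.stop();
	}
}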

collectAsMap

def collectAsMap(): Map[K, V]
  • Converts a pair (key-value) RDD into a Map
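
One behavior worth noting before the examples (an addition to the original post): collectAsMap returns a single Map on the driver, so if a key appears more than once in the RDD, only one of its values is preserved, and the whole result has to fit in driver memory. Below is a minimal Java sketch of the duplicate-key pitfall, with assumed sample data; the full Scala and Java versions of the operator follow below.

package nj.zb.sparkstu;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Map;

public class CollectAsMapDuplicateKeysJava {
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("collectAsMapDuplicateKeysJava");
		JavaSparkContext sc = new JavaSparkContext(conf);
		// Key 2 appears twice in the RDD
		JavaPairRDD<Integer, Integer> pairs = sc
				.parallelize(Arrays.asList(new Tuple2<>(2, 1), new Tuple2<>(2, 2)))
				.mapToPair(t -> t);
		// The Map can hold only one value per key, so one of the two entries is dropped
		Map<Integer, Integer> asMap = pairs.collectAsMap();
		System.out.println(pairs.count() + " elements -> " + asMap); // 2 elements -> {2=...}
		sc.stop();
	}
}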

Scala version

package nj.zb.sparkstu

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object collectAsMap {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("collectAsMap")
    val sc: SparkContext = new SparkContext(conf)
    val rdd1: RDD[(Int, Int)] = sc.parallelize(List((1,1),(2,1),(2,2),(3,1),(3,2),(3,3)))
    val rdd2: collection.Map[Int, Int] = rdd1.collectAsMap()
    rdd2.foreach(println)
  }
}

Output: (screenshot omitted)

Java version

package nj.zb.sparkstu;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Map;
import java.util.Set;

public class CollectAsMapJava {
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("CollectAsMapJava");
		JavaSparkContext sc = new JavaSparkContext(conf);
		JavaRDD<Tuple2<Integer, Integer>> rdd1 = sc.parallelize(Arrays.asList(
				new Tuple2<>(1, 1),
				new Tuple2<>(2, 1),
				new Tuple2<>(2, 2),
				new Tuple2<>(3, 1),
				new Tuple2<>(3, 2),
				new Tuple2<>(3, 3)));

		JavaPairRDD<Integer, Integer> rdd2 = rdd1.mapToPair(new PairFunction<Tuple2<Integer, Integer>, Integer, Integer>() {
			@Override
			public Tuple2<Integer, Integer> call(Tuple2<Integer, Integer> integerIntegerTuple2) throws Exception {
				return integerIntegerTuple2;
			}
		});
		Map<Integer, Integer> rdd3 = rdd2.collectAsMap();
		Set<Integer> rdd4 = rdd3.keySet();
		for (Integer a : rdd4) {
			System.out.println(a+"->"+rdd3.get(a));
		}
	}
}

Note: JavaPairRDD.fromJavaRDD() cannot be used directly for the conversion here; doing so throws an error, which is why the example goes through mapToPair instead.
Output: (screenshot omitted)
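
As an alternative to the mapToPair detour above (an addition of this write-up, not something shown in the original post): when the pairs already exist as a local collection of Tuple2, JavaSparkContext.parallelizePairs creates a JavaPairRDD directly, so no JavaRDD-to-JavaPairRDD conversion is needed at all. A sketch:

package nj.zb.sparkstu;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Map;

public class CollectAsMapParallelizePairsJava {
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("collectAsMapParallelizePairsJava");
		JavaSparkContext sc = new JavaSparkContext(conf);
		// parallelizePairs builds the pair RDD directly from a list of Tuple2,
		// avoiding both JavaPairRDD.fromJavaRDD() and mapToPair()
		JavaPairRDD<Integer, Integer> pairs = sc.parallelizePairs(Arrays.asList(
				new Tuple2<>(1, 1), new Tuple2<>(2, 1), new Tuple2<>(2, 2),
				new Tuple2<>(3, 1), new Tuple2<>(3, 2), new Tuple2<>(3, 3)));
		Map<Integer, Integer> result = pairs.collectAsMap();
		result.forEach((k, v) -> System.out.println(k + "->" + v));
		sc.stop();
	}
}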
