Spark Core Programming (Advanced): A Sort-Based WordCount Program

Case

1. Requirements:
1> Count the number of times each word appears in a text file.
2> Sort the words by their occurrence counts in descending order.
2. Code:
Java version:

import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;

import scala.Tuple2;
/*
 * A wordCount program with sorting
 */
public class SortWordCount {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("SortWordCount")
                .setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        //Create the lines RDD
        JavaRDD<String> lines = sc.textFile("G://SparkDevel//test//wordCount//data//spark.txt");
        //Split each line into words
        JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {

            private static final long serialVersionUID = 1L;

            @Override
            public Iterable<String> call(String v1) throws Exception {

                return Arrays.asList(v1.split(" "));
            }
        });
        //Map each word to a (word, 1) tuple
        JavaPairRDD<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {

            private static final long serialVersionUID = 1L;

            @Override
            public Tuple2<String, Integer> call(String v1) throws Exception {

                return new Tuple2<String, Integer>(v1, 1);
            }
        });
        //Count the number of occurrences of each word
        JavaPairRDD<String, Integer> wordCount = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {

            private static final long serialVersionUID = 1L;

            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {

                return v1 + v2;
            }
        }); 
        //Next, sort by the number of occurrences of each word, in descending order.
        // wordCount currently has the format (hello, 3) (you, 2);
        // we need to transform the RDD into the format (3, hello) (2, you) so it can be sorted by count.
        //Swap the key and value of each pair
        JavaPairRDD<Integer, String> countWord = wordCount.mapToPair(
                new PairFunction<Tuple2<String,Integer>, Integer, String>() {

                    private static final long serialVersionUID = 1L;

                    @Override
                    public Tuple2<Integer, String> call(Tuple2<String, Integer> v1) throws Exception {
                        return new Tuple2<Integer, String>(v1._2,v1._1);
                    }
        });
        //Sort by key (the count) in descending order
        JavaPairRDD<Integer, String> sortCountWord = countWord.sortByKey(false);
        //Swap key and value back to (word, count)
        JavaPairRDD<String, Integer> sortWordCount = sortCountWord.mapToPair(new PairFunction<Tuple2<Integer,String>, String, Integer>() {

            private static final long serialVersionUID = 1L;

            @Override
            public Tuple2<String, Integer> call(Tuple2<Integer, String> v1) throws Exception {

                return new Tuple2<String, Integer>(v1._2, v1._1);
            }
        });
        //Print the sorted word counts
        sortWordCount.foreach(new VoidFunction<Tuple2<String,Integer>>() {

            private static final long serialVersionUID = 1L;

            @Override
            public void call(Tuple2<String, Integer> v1) throws Exception {
                System.out.println(v1._1 + ":" + v1._2);

            }
        });
    }

}

Scala version:

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object SortWordCount {
 def main(args: Array[String]){
   val conf = new SparkConf()
       .setAppName("SortWordCount")
       .setMaster("local")
   val sc = new SparkContext(conf)

   // Read the input file as an RDD of lines (4 partitions)
   val lines = sc.textFile("G://SparkDevel//test//wordCount//data//spark.txt", 4)
   // Split each line into words
   val words = lines.flatMap(_.split(" "))
   // Map each word to a (word, 1) pair and sum the counts per word
   val pairs = words.map((_, 1))
   val wordCount = pairs.reduceByKey(_ + _)
   // Swap to (count, word), sort descending by count, then swap back to (word, count)
   val countWord = wordCount.map(wordCount => (wordCount._2, wordCount._1))
   val sortCountWord = countWord.sortByKey(false)
   val sortWordCount = sortCountWord.map(sortCountWord => (sortCountWord._2, sortCountWord._1))
   // Print the sorted word counts
   sortWordCount.foreach(sortWordCount => println(sortWordCount._1 + ":" + sortWordCount._2))

 } 
}
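
A note on an alternative: the swap-sort-swap pattern above works as shown, but RDD.sortBy (available since Spark 1.0) can sort the (word, count) pairs directly by their count, so the two swap steps are not needed. The following is a minimal sketch of that variant; the object name SortWordCountBySortBy is made up here, and it assumes the same local input path as the example above.

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object SortWordCountBySortBy {
  def main(args: Array[String]) {
    val conf = new SparkConf()
      .setAppName("SortWordCountBySortBy")
      .setMaster("local")
    val sc = new SparkContext(conf)

    // Same counting logic as in the example above
    // (assumption: same local input path)
    val wordCount = sc.textFile("G://SparkDevel//test//wordCount//data//spark.txt")
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // sortBy sorts the (word, count) pairs directly by the count,
    // so no explicit key/value swap is needed
    wordCount
      .sortBy(_._2, ascending = false)
      .foreach(pair => println(pair._1 + ":" + pair._2))

    sc.stop()
  }
}

Internally, sortBy keys each record by the result of the given function and then calls sortByKey, so it performs essentially the same swap-and-sort as the explicit version, just written more concisely.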

Results:
(Screenshot of the sorted word-count output.)
