Scala implementation
package com.bjsxt.myscalacode

import org.apache.spark.{SparkConf, SparkContext}

object SparkWordCount {
  def main(args: Array[String]): Unit = {
    // Run locally with one thread; "local[*]" would use all available cores
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("sparkwordcount")
    val sc = new SparkContext(conf)

    // Read the input file; each element of `lines` is one line of text
    val lines = sc.textFile(".idea/data/words")
    // Split every line on spaces, flattening into a single RDD of words
    val words = lines.flatMap(line => line.split(" "))
    // Pair each word with an initial count of 1
    val pairWords = words.map(word => (word, 1))
    // Sum the counts for each word
    val result = pairWords.reduceByKey((v1, v2) => v1 + v2)
    // Sort by count in descending order and print each (word, count) pair
    result.sortBy(tp => tp._2, ascending = false).foreach(println)
    // Unsorted alternative: result.foreach(tuple => println(tuple))
    sc.stop()
  }
}
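For orientation, suppose the input words file held the following three lines (this sample content is hypothetical, not taken from the original project):

hello spark
hello java
hello spark

The job would then print the pairs sorted by count in descending order:

(hello,3)
(spark,2)
(java,1)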
Simplified:
val conf = new SparkConf().setMaster("local").setAppName("sparkwordcount")
val sc = new SparkContext(conf)
sc.textFile("./data/words").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).foreach(println)
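Note that the chained one-liner drops the descending sort that the full program applied. It can be restored by inserting sortBy before the final action; a minimal sketch:

sc.textFile("./data/words").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).sortBy(_._2, ascending = false).foreach(println)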
Java implementation
package com.bjsxt.myjavacode;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.*;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

public class JavaWordCount {
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf();
        sparkConf.setMaster("local");
        sparkConf.setAppName("wc");
        JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);

        // Read the input file; each element of `lines` is one line of text
        JavaRDD<String> lines = sparkContext.textFile(".idea/data/words");

        // Split every line on spaces, flattening into a single RDD of words
        JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) throws Exception {
                return Arrays.asList(s.split(" ")).iterator();
            }
        });

        // Pair each word with an initial count of 1
        JavaPairRDD<String, Integer> pairRDD = words.mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<>(word, 1);
            }
        });

        // Sum the counts for each word
        JavaPairRDD<String, Integer> result = pairRDD.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });

        // Print each (word, count) pair
        result.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            @Override
            public void call(Tuple2<String, Integer> tuple2) throws Exception {
                System.out.println(tuple2);
            }
        });

        sparkContext.stop();
    }
}
Simplified:
SparkConf conf = new SparkConf();
conf.setMaster("local");
conf.setAppName("wc");
JavaSparkContext sc = new JavaSparkContext(conf);
JavaRDD<String> lines = sc.textFile("./data/words");
JavaRDD<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
JavaPairRDD<String, Integer> pairWords = words.mapToPair(word -> new Tuple2<>(word, 1));
JavaPairRDD<String, Integer> result = pairWords.reduceByKey((v1, v2) -> v1 + v2);
result.foreach(tuple2 -> System.out.println(tuple2));
sc.stop();
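Both versions assume spark-core is on the classpath. For the Scala build, an sbt dependency along these lines works (the version number here is an assumption; match it to your Spark installation, and use the equivalent org.apache.spark:spark-core_2.11 Maven coordinate for the Java project):

libraryDependencies += "org.apache.spark" %% "spark-core" % "2.3.1"

Also keep in mind that foreach(println) runs on the executors. In local mode that is the same JVM as the driver, so the output appears in the console, but on a real cluster you would collect() the result to the driver before printing.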