package javaTest;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;
import java.util.Arrays;
import java.util.Iterator;
public class JavaWordCount {

    /**
     * Classic Spark word count: read text input, split lines into words,
     * count occurrences per word, sort by count descending, and write the
     * (word, count) pairs to the output path.
     *
     * @param args args[0] = input path (e.g. an HDFS directory),
     *             args[1] = output path (must not already exist)
     */
    public static void main(String[] args) {
        // Fail fast with a usable message instead of an
        // ArrayIndexOutOfBoundsException deep inside the job.
        if (args.length < 2) {
            System.err.println("Usage: JavaWordCount <input-path> <output-path>");
            System.exit(1);
        }
        // An application name is mandatory: without setAppName (or an external
        // --name / spark.app.name), SparkContext construction throws.
        // The master is intentionally NOT hard-coded; spark-submit provides it.
        SparkConf conf = new SparkConf().setAppName("JavaWordCount");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            // One RDD element per input line.
            JavaRDD<String> lines = sc.textFile(args[0]);
            // Split each line on single spaces into individual words.
            JavaRDD<String> words =
                    lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
            // Pair every word with an initial count of 1.
            JavaPairRDD<String, Integer> wordsWithOne =
                    words.mapToPair(word -> new Tuple2<>(word, 1));
            // Sum the counts per word: reduceByKey((a, b) -> a + b).
            JavaPairRDD<String, Integer> counts = wordsWithOne.reduceByKey(Integer::sum);
            // Sort by count descending: swap to (count, word), sort by key
            // (sortByKey(false) = descending), then swap back to (word, count).
            JavaPairRDD<String, Integer> sorted = counts
                    .<Integer, String>mapToPair(Tuple2::swap)
                    .sortByKey(false)
                    .mapToPair(Tuple2::swap);
            // One part-file per partition is written under args[1].
            sorted.saveAsTextFile(args[1]);
        } finally {
            // Always release the SparkContext, even when a stage fails.
            sc.stop();
        }
    }
}
/*
 * Submitting the job (note: the --class value must be this class's actual
 * fully-qualified name, javaTest.JavaWordCount — the original command used
 * cn.spark.test.JavaWordCount, which does not match this file's package):
 *
 * spark-submit --master spark://hdp-01:7077 --class javaTest.JavaWordCount /root/spark01-1.0-SNAPSHOT.jar hdfs://hdp-01:9000/wordcount/input hdfs://hdp-01:9000/wordcount/output
 */