// Build a local SparkContext for the word-count job.
SparkConf conf = new SparkConf().setAppName("wordCount").setMaster("local");
// FIX: the original never stopped the context. JavaSparkContext is Closeable,
// so try-with-resources guarantees it is shut down even if a stage throws.
try (JavaSparkContext sc = new JavaSparkContext(conf)) {
    // Read the input data from HDFS (one RDD element per line).
    JavaRDD<String> lines = sc.textFile("hdfs://input/wordCount.txt");
    // Split each line into words on commas.
    JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        private static final long serialVersionUID = 1L;
        @Override
        public Iterable<String> call(String x) throws Exception {
            return Arrays.asList(x.split(","));
        }
    });
    // Map each word to a (word, 1) pair ...
    JavaPairRDD<String, Integer> mappedRDD = words.mapToPair(new PairFunction<String, String, Integer>() {
        private static final long serialVersionUID = 1L;
        @Override
        public Tuple2<String, Integer> call(String x) throws Exception {
            return new Tuple2<String, Integer>(x, 1);
        }
    });
    // ... and sum the counts per word.
    JavaPairRDD<String, Integer> wordCountRDD = mappedRDD.reduceByKey(new Function2<Integer, Integer, Integer>() {
        private static final long serialVersionUID = 1L;
        @Override
        public Integer call(Integer v1, Integer v2) throws Exception {
            return v1 + v2;
        }
    });
    // Persist the per-word counts to HDFS; saveAsTextFile is an action and
    // triggers evaluation of the whole lazy pipeline above.
    // NOTE(review): despite the ".txt" suffix, saveAsTextFile creates a
    // *directory* at this path containing part-NNNNN files — confirm the
    // downstream consumer expects that.
    wordCountRDD.saveAsTextFile("hdfs://output/wordCountOutPut.txt");
}