Create the input file
here where my your
hello world
test file
jump you can you jump
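If you prefer to create this test file from code instead of a text editor, here is a minimal sketch (the class name CreateWordCountFile is just an illustration; it writes the file "wordCount" into the working directory, which is the path the program below reads, and assumes Java 8 for the two-argument Files.write overload):

import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;

public class CreateWordCountFile {
    public static void main(String[] args) throws Exception {
        // Write the four sample lines shown above to ./wordCount (UTF-8 by default).
        Files.write(Paths.get("wordCount"), Arrays.asList(
                "here where my your",
                "hello world",
                "test file",
                "jump you can you jump"));
    }
}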
Implementation
package cn.tukk.sparkproject.test.testOrder;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import java.util.Arrays;

public class SparkWordCount {

    public static void main(String[] args) {
        // Run locally in a single JVM; the app name shows up in the Spark UI.
        SparkConf conf = new SparkConf().setAppName("testWordCount").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Read the input file created above; each RDD element is one line of text.
        JavaRDD<String> lines = sc.textFile("wordCount");

        // Split each line on spaces, producing one element per word.
        JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 9166467038300894254L;

            @Override
            public Iterable<String> call(String s) throws Exception {
                return Arrays.asList(s.split(" "));
            }
        });

        // Map each word to a (word, 1) pair.
        JavaPairRDD<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {
            private static final long serialVersionUID = -2575877317483490894L;

            @Override
            public Tuple2<String, Integer> call(String s) throws Exception {
                return new Tuple2<String, Integer>(s, 1);
            }
        });

        // Sum the counts for each word.
        JavaPairRDD<String, Integer> wordCounts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1995267764597335108L;

            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });

        // Print each (word, count) pair.
        wordCounts.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 7339200658105284851L;

            @Override
            public void call(Tuple2<String, Integer> wordAndCount) throws Exception {
                System.out.println(wordAndCount._1 + ": " + wordAndCount._2);
            }
        });

        sc.close();
    }
}
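For comparison, the same job can be written much more compactly with Java 8 lambdas. This is only a sketch: it assumes Java 8 and the Spark 1.x Java API used above, where the flatMap callback returns an Iterable (on Spark 2.x and later it must return an Iterator instead).

JavaSparkContext sc = new JavaSparkContext(
        new SparkConf().setAppName("testWordCount").setMaster("local"));

sc.textFile("wordCount")
        .flatMap(s -> Arrays.asList(s.split(" ")))             // one element per word
        .mapToPair(w -> new Tuple2<>(w, 1))                    // (word, 1)
        .reduceByKey((a, b) -> a + b)                          // sum counts per word
        .foreach(t -> System.out.println(t._1 + ": " + t._2)); // print results

sc.close();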
Output
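With the four input lines above, the job prints one line per distinct word. The order is not deterministic, but the counts should come out as follows:

here: 1
where: 1
my: 1
your: 1
hello: 1
world: 1
test: 1
file: 1
jump: 2
you: 2
can: 1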