1. 添加pom依赖
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.2.0</version>
</dependency>
2. 本地模式运行spark应用
import java.util.Arrays; import java.util.Iterator; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.FlatMapFunction; import org.apache.spark.api.java.function.Function2; import org.apache.spark.api.java.function.PairFunction; import scala.Tuple2; /** * @author jianhong * @date 2018/3/15 */ public class JavaWordCount { public static void main(String[] args) { SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local"); JavaSparkContext sc = new JavaSparkContext(conf); JavaRDD<String> lines = sc.textFile("/Users/jianhong1/testfile"); JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() { public Iterator<String> call(String line) throws Exception { return Arrays.asList(line.split("\\s")).iterator(); } }); JavaPairRDD<String, Integer> pairs = words.mapToPair( new PairFunction<String, String, Integer>() { public Tuple2<String, Integer> call(String word) throws Exception { return new Tuple2<String, Integer>(word, 1); } }); JavaPairRDD<String, Integer> wordcount = pairs.reduceByKey( new Function2<Integer, Integer, Integer>() { public Integer call(Integer v1, Integer v2) throws Exception { return v1 + v2; } }); for (Tuple2<String, Integer> pair: wordcount.collect()) { System.out.println(pair._1 + ": " + pair._2); } sc.close(); } }