// Concise version (简洁版本)
// Concise WordCount: read lines from /input, split on a single space,
// count occurrences per word, and write the (word, count) pairs to /output.
val conf: SparkConf =
  new SparkConf().setMaster("local[*]").setAppName("WordCount")
val sc = new SparkContext(conf)
try {
  sc.textFile("/input")
    .flatMap(_.split(" "))     // NOTE: splits on a single space only — tabs / repeated spaces produce empty tokens
    .map((_, 1))               // pair each token with an initial count of 1
    .reduceByKey(_ + _)        // sum the counts per distinct word
    .saveAsTextFile("/output")
} finally {
  sc.stop()                    // always release the SparkContext, even if the job fails
}
// Detailed version (详细版本)
// Detailed WordCount: the same pipeline with explicit lambda parameters,
// splitting on any run of whitespace rather than a single space.
val conf: SparkConf =
  new SparkConf().setMaster("local[*]").setAppName("WordCount")
val sc = new SparkContext(conf)
try {
  sc.textFile("/input")
    .flatMap(line => line.split("\\s+"))   // "\\s+" handles tabs and consecutive spaces
    .map(word => (word, 1))                // pair each word with an initial count of 1
    .reduceByKey((acc, n) => acc + n)      // sum the counts per distinct word
    .saveAsTextFile("/output")
} finally {
  sc.stop()                                // always release the SparkContext, even if the job fails
}