package spark
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
/** Equivalent spark-shell one-liner (on Linux):
 * sc.textFile("input").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).collect
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Build the SparkConf (the master URL is supplied by spark-submit)
    val sparkConf: SparkConf = new SparkConf().setAppName("WordCount")
    // Create the SparkContext
    val sc: SparkContext = new SparkContext(sparkConf)
    // Read the input file
    val line: RDD[String] = sc.textFile(args(0))
    // Split each line into words (flatten)
    val word: RDD[String] = line.flatMap(_.split(" "))
    // Map each word to a (word, 1) tuple
    val wordAndOne: RDD[(String, Int)] = word.map((_, 1))
    // Sum the counts for each word
    val wordAndCount: RDD[(String, Int)] = wordAndOne.reduceByKey(_ + _)
    // Save the result to the output path
    wordAndCount.saveAsTextFile(args(1))
    // Shut down the SparkContext
    sc.stop()
  }
}
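For quick local testing without a cluster, the same pipeline can be run against an in-memory collection with a local master. A minimal sketch (the object name and the sample input are made up for illustration):

package spark

import org.apache.spark.{SparkConf, SparkContext}

object WordCountLocal {
  def main(args: Array[String]): Unit = {
    // local[*] runs Spark inside this JVM using all available cores
    val conf = new SparkConf().setAppName("WordCountLocal").setMaster("local[*]")
    val sc = new SparkContext(conf)
    // Hypothetical in-memory input instead of a file, just for the demo
    val lines = sc.parallelize(Seq("hello spark", "hello world"))
    val counts = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    counts.collect().foreach(println) // prints e.g. (hello,2), (spark,1), (world,1)
    sc.stop()
  }
}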
/*
pom.xml
Add the Spark / Scala related dependencies here:
*/
<dependencies>
    <!-- The original snippet was truncated before this element; presumably the
         Spark core dependency for Spark 2.1.1 / Scala 2.11 (reconstructed) -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>2.1.1</version>
    </dependency>
</dependencies>
<build>
    <finalName>WordCount</finalName>
    <plugins>
        <plugin>
            <groupId>net.alchim31.maven</groupId>
            <artifactId>scala-maven-plugin</artifactId>
            <version>3.2.2</version>
            <executions>
                <execution>
                    <goals>
                        <goal>compile</goal>
                        <goal>testCompile</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>3.0.0</version>
            <configuration>
                <archive>
                    <manifest>
                        <!-- Fully qualified name, since WordCount lives in the "spark" package -->
                        <mainClass>spark.WordCount</mainClass>
                    </manifest>
                </archive>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
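After running mvn package, the assembly plugin produces target/WordCount-jar-with-dependencies.jar, which can then be submitted to a cluster; for example (the master URL and the input/output paths below are placeholders):

spark-submit --class spark.WordCount --master <master-url> target/WordCount-jar-with-dependencies.jar <input-path> <output-path>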