Create a custom accumulator class: MyAccumulator
package Util
import org.apache.spark.util.AccumulatorV2
import scala.collection.mutable
/*
 * Custom accumulator for word count.
 * 1. Extend AccumulatorV2 and fix the type parameters:
 *    IN:  the input type; in this demo each input is a word -> String
 *    OUT: the output type; the result is (word, count) pairs -> Map
 * 2. Override the methods:
 *    add:   look the word up; start it at 0 if absent, then add 1
 *    value: return the result: (word, count)
 *    merge: combine two accumulators; for each key in the other map, add its
 *           count to ours (iterate one map and use getOrElse on the other)
 */
class MyAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]] {
  private var wcMap = mutable.Map[String, Int]() // holds the output map -> (word, count)
  // whether the accumulator is still in its initial (zero) state
override def isZero: Boolean = {
wcMap.isEmpty
}
  override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = {
    // copy() must duplicate the accumulator together with its current data
    val newAcc = new MyAccumulator
    newAcc.wcMap ++= wcMap
    newAcc
  }
override def reset(): Unit = {
wcMap.clear()
}
override def add(word: String): Unit = {
    // if the word is absent start from 0, otherwise increment its count
wcMap.update(word, wcMap.getOrElse(word, 0) + 1)
}
  override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit = {
    // merge the other accumulator's map into this one, summing the count per word
    other.value.foreach {
      case (word, count) => wcMap.update(word, wcMap.getOrElse(word, 0) + count)
    }
  }
override def value: mutable.Map[String, Int] = {
wcMap
}
}
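To sanity-check add/merge/value without a cluster, two instances can be driven by hand, simulating two task-side copies whose partial maps are merged on the driver. A minimal sketch (the object MyAccumulatorCheck is made up for illustration):
package Util
object MyAccumulatorCheck {
  def main(args: Array[String]): Unit = {
    val partA = new MyAccumulator
    val partB = new MyAccumulator
    Seq("hotpot", "happy").foreach(partA.add)
    Seq("hotpot").foreach(partB.add)
    partA.merge(partB) // the counts for "hotpot" are summed: 1 + 1 = 2
    println(partA.value) // Map(hotpot -> 2, happy -> 1); map order may vary
  }
}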
Extract the environment-preparation code into an environment trait (i.e., SparkContext and SparkSession factories; a common practice in ordinary project code, not specific to this demo)
package Util
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
trait Env {
  def getSparkContext(appName: String = "AppName", master: String = "local[*]"): SparkContext = {
val conf: SparkConf = new SparkConf().setMaster(master).setAppName(appName)
new SparkContext(conf)
}
  def getSparkSession(appName: String = "AppName", master: String = "local[*]"): SparkSession = {
    SparkSession.builder().master(master).appName(appName).getOrCreate()
  }
}
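Any driver object can then mix the trait in and call either factory method. A minimal sketch (the object EnvDemo is made up for illustration):
package Util
object EnvDemo extends Env {
  def main(args: Array[String]): Unit = {
    val spark = getSparkSession("envDemo") // master defaults to local[*]
    println(spark.version)
    spark.stop()
  }
}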
Using the accumulator
package SparkCore.Accumulator
import Util.{Env, MyAccumulator}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
object WorldCountAccumulator extends Env {
def main(args: Array[String]): Unit = {
    /**
     * Implement word count with the custom accumulator:
     * 1. Create the accumulator
     * 2. Register it with Spark
     * 3. Traverse the RDD, adding each word to the accumulator
     * 4. Read the accumulator's value on the driver
     */
Logger.getLogger("org").setLevel(Level.ERROR)
    // prepare the Spark environment
val sparkContext: SparkContext = getSparkContext()
    val wcAccumulator = new MyAccumulator // 1. create the accumulator
    sparkContext.register(wcAccumulator, "wordAcc") // 2. register it with Spark
val rdd: RDD[String] = sparkContext.makeRDD(List("happy", "pokey", "lorin", "hotpot", "hotpot"))
    rdd.foreach(word => wcAccumulator.add(word)) // 3. add each word to the accumulator
    println(wcAccumulator.value) // 4. read the result on the driver
sparkContext.stop()
}
}
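Running the demo prints the merged map, e.g. Map(hotpot -> 2, happy -> 1, pokey -> 1, lorin -> 1) (map order may vary). For reference, the same counts can be computed with a regular shuffle-based transformation; the accumulator version avoids the shuffle because counting happens inside the tasks and only the small partial maps are merged on the driver. A sketch reusing the same Env trait (the object WordCountShuffle is made up for comparison):
package SparkCore.Accumulator
import Util.Env
import org.apache.spark.rdd.RDD
object WordCountShuffle extends Env {
  def main(args: Array[String]): Unit = {
    val sc = getSparkContext()
    val rdd: RDD[String] = sc.makeRDD(List("happy", "pokey", "lorin", "hotpot", "hotpot"))
    val counts = rdd.map(word => (word, 1)).reduceByKey(_ + _) // shuffle happens here
    counts.collect().foreach(println) // (hotpot,2), (happy,1), ...
    sc.stop()
  }
}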
Common Maven dependencies for Spark Core
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.example</groupId>
<artifactId>spark_test</artifactId>
<version>1.0-SNAPSHOT</version>
<name>${project.artifactId}</name>
<description>My wonderful Scala app</description>
<inceptionYear>2015</inceptionYear>
<licenses>
<license>
<name>My License</name>
<url>http://....</url>
<distribution>repo</distribution>
</license>
</licenses>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<encoding>UTF-8</encoding>
<scala.version>2.12.17</scala.version>
<scala.compat.version>2.12</scala.compat.version>
<spark.version>3.1.2</spark.version>
<hadoop.version>3.1.1</hadoop.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<maven.compiler.encoding>UTF-8</maven.compiler.encoding>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.12</artifactId>
<version>${spark.version}</version>
<!-- <scope>provided</scope> -->
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.12</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.12</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.12</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-mllib_2.12</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<!-- Test -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.21</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main</sourceDirectory>
<testSourceDirectory>src/main</testSourceDirectory>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<!-- file name of the jar -->
<finalName>${project.groupId}</finalName>
<archive>
<manifest>
<!-- replace this with the class that contains your main method -->
<mainClass>SparkCore.Accumulator.WorldCountAccumulator</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id> <!-- this is used for inheritance merges -->
<phase>package</phase> <!-- run the jar-merge (assembly) during the package phase -->
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<!-- see http://davidb.github.com/scala-maven-plugin -->
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.2.0</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
<configuration>
<args>
<arg>-dependencyfile</arg>
<arg>${project.build.directory}/.scala_dependencies</arg>
</args>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<useFile>false</useFile>
<disableXmlReport>true</disableXmlReport>
<!-- If you have classpath issue like NoDefClassError,... -->
<!-- useManifestOnlyJar>false</useManifestOnlyJar -->
<includes>
<include>**/*Test.*</include>
<include>**/*Suite.*</include>
</includes>
</configuration>
</plugin>
</plugins>
</build>
</project>