package com.spark.streaming
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
//todo: use Spark Streaming to consume data from Kafka and do a word count -- receiver-based approach
object SparkStreamingKafkaReceiverCheckpoint {
  // Windows only: point Hadoop's native utilities (winutils) at a local Hadoop install
  System.setProperty("hadoop.home.dir", "d:/soft/hadoop/hadoop-2.7.3")
  // State update function for updateStateByKey:
  // a holds the new counts for a key in the current batch, b the previous running total
  def updateFunc(a: Seq[Int], b: Option[Int]): Option[Int] = {
    Some(a.sum + b.getOrElse(0))
  }
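  // For example, if a word appeared twice in the current batch (a = Seq(1, 1))
  // and its running total so far is 3 (b = Some(3)), the new state is Some(5).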
  def main(args: Array[String]): Unit = {
    val checkpointPath = "./kafka-receiver"
    // Recover the StreamingContext from the checkpoint directory if one exists;
    // otherwise build a fresh context via createFunc
    val ssc = StreamingContext.getOrCreate(checkpointPath, () => {
      createFunc(checkpointPath)
    })
    ssc.start()
    ssc.awaitTermination()
  }
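  // Caveat: a checkpoint is tied to the code that wrote it. After modifying this
  // job, delete ./kafka-receiver before restarting, otherwise getOrCreate may
  // fail to deserialize the old context.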
  def createFunc(checkpointPath: String): StreamingContext = {
    //todo: 1. create the SparkConf
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("SparkStreamingKafka_Receiver_checkpoint")
      .setMaster("local[4]")
      //todo: enable the write-ahead log (WAL) so received data survives failures
      .set("spark.streaming.receiver.writeAheadLog.enable", "true")
    //todo: 2. create the SparkContext
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")
    //todo: 3. create the StreamingContext with a 5-second batch interval
    val ssc = new StreamingContext(sc, Seconds(5))
    // the checkpoint stores both DStream metadata (for getOrCreate recovery)
    // and the running word-count state used by updateStateByKey
    ssc.checkpoint(checkpointPath)
    //todo: 4. the ZooKeeper quorum used by the Kafka 0.8 consumer
    val zkServer = "star.com:2181"
    //todo: 5. the consumer group id
    val groupId = "spark-kafka-receiver01"
    //todo: 6. topics to consume; a single consumer group can read several topics.
    // The map is (topic name -> number of consumer threads for that topic)
    val topics = Map("kafka_spark" -> 1)
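    // e.g. Map("kafka_spark" -> 1, "other_topic" -> 2) would consume two topics
    // with one and two threads respectively ("other_topic" is illustrative)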
    //todo: 7. run several receivers in parallel to read the Kafka topic, 3 here.
    // Each receiver pins one core, so local[4] leaves one core for processing.
    val resultDStream = (1 to 3).map { _ =>
      //todo: 8. KafkaUtils.createStream consumes the topic as a DStream of
      // (key, message) pairs; keep only the message value
      KafkaUtils.createStream(ssc, zkServer, groupId, topics).map(_._2)
    }
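    // With the WAL enabled, the Spark docs recommend an unreplicated storage
    // level for receiver input; a sketch using the createStream overload that
    // takes one (requires import org.apache.spark.storage.StorageLevel):
    // KafkaUtils.createStream(ssc, zkServer, groupId, topics,
    //   StorageLevel.MEMORY_AND_DISK_SER).map(_._2)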
    // union all the receiver DStreams into one DStream via the StreamingContext
    val kafkaDStream: DStream[String] = ssc.union(resultDStream)
    //todo: 9. the stream now carries the raw lines from the topic;
    // split each line into words and map every word to (word, 1)
    val wordAndOne = kafkaDStream.flatMap(_.split(" ")).map((_, 1))
    //todo: 10. keep a running count per word across batches
    val result = wordAndOne.updateStateByKey(updateFunc)
    //todo: print each batch's current counts
    result.print()
    ssc
  }
}
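To drive the example, publish lines to the kafka_spark topic, for instance with the console producer that ships with Kafka 0.8-era distributions (host and port are assumptions): bin/kafka-console-producer.sh --broker-list star.com:9092 --topic kafka_spark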
Build configuration (Maven). Note that the Kafka 0.8 receiver API is deprecated as of Spark 2.3 and was removed in Spark 3.0:
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
    <version>2.4.5</version>
</dependency>