package cn.testdemo.dstream.socket
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
//todo:利用sparkStreaming开窗函数reduceBykeyAndWindow实现单词计数
/**
 * Word count over a sliding window using Spark Streaming's reduceByKeyAndWindow,
 * reading whitespace-separated words from a raw TCP socket.
 */
object SparkStreamingSocketWindow {

  /**
   * State-update function in the shape expected by `updateStateByKey`.
   *
   * NOTE(review): this function is never wired into the pipeline below — the job
   * uses `reduceByKeyAndWindow` instead. Kept as-is for interface compatibility.
   *
   * @param currentValues all the 1s emitted for one word in the current batch,
   *                      e.g. (hadoop,1)(hadoop,1)(hadoop,1) contributes Seq(1,1,1)
   * @param historyValue  running total for that word from previous batches, if any
   * @return the new running total for the word
   */
  def updateFunc(currentValues: Seq[Int], historyValue: Option[Int]): Option[Int] = {
    Some(currentValues.sum + historyValue.getOrElse(0))
  }

  def main(args: Array[String]): Unit = {
    // 1. Spark configuration. local[2]: one thread is consumed by the socket
    //    receiver, so at least two are needed for any processing to happen.
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("SparkStreamingSocketWindow")
      .setMaster("local[2]")

    // 2. SparkContext, with log noise suppressed below WARN.
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")

    // 3. StreamingContext with a 5-second batch interval. Window and slide
    //    durations below must be multiples of this interval.
    val ssc = new StreamingContext(sc, Seconds(5))

    // Checkpoint directory for intermediate state required by windowed operations.
    ssc.checkpoint("./ck2018")

    // 4. Ingest lines from the TCP socket.
    val stream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.216.128", 9999)

    // 5. Split lines into words and pair each word with a count of 1.
    val wordAndOne: DStream[(String, Int)] = stream.flatMap(_.split(" ")).map((_, 1))

    // 6. Windowed word count.
    //    reduceFunc:      associative function merging counts per key
    //    windowDuration:  length of the window (10s — covers two batches)
    //    slideDuration:   how often the window is evaluated (every 5s)
    // BUG FIX: the original passed (Seconds(5), Seconds(10)) — a 5s window
    // sliding every 10s, which silently drops half of the incoming data.
    // The comments (and the standard form of this example) intend an
    // overlapping window: window length 10s, slide interval 5s.
    val result: DStream[(String, Int)] =
      wordAndOne.reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(10), Seconds(5))

    // 7. Print each window's counts to stdout.
    result.print()

    // 8. Start the computation and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
//todo:利用sparkStreaming开窗函数reduceBykeyAndWindow实现单词计数
// NOTE(review): this entire object is a byte-identical DUPLICATE of the
// SparkStreamingSocketWindow object defined earlier in this file (the imports
// above it are duplicated too). Two top-level objects with the same name in
// one file will not compile — this second copy appears to be paste residue
// and should be deleted.
object SparkStreamingSocketWindow {
// currentValues: all the 1s emitted for one word in the current batch, e.g. (hadoop,1)(hadoop,1)(hadoop,1)
// historyValue: running total for that word across all previous batches
// NOTE(review): defined but never used — the pipeline below uses reduceByKeyAndWindow, not updateStateByKey.
def updateFunc(currentValues:Seq[Int], historyValue:Option[Int]):Option[Int] ={
val newValues: Int = currentValues.sum + historyValue.getOrElse(0)
Some(newValues)
}
def main(args: Array[String]): Unit = {
//1. Create the SparkConf; local[2] leaves a thread free beyond the socket receiver
val sparkConf: SparkConf = new SparkConf().setAppName("SparkStreamingSocketWindow").setMaster("local[2]")
//2. Create the SparkContext
val sc = new SparkContext(sparkConf)
sc.setLogLevel("WARN")
//3. Create the StreamingContext with a 5-second batch interval
val ssc = new StreamingContext(sc,Seconds(5))
// Set a checkpoint directory for intermediate state needed by windowed operations
ssc.checkpoint("./ck2018")
//4. Ingest lines from the TCP socket
val stream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.216.128",9999)
//5. Transform the stream: split lines into words, pair each with a count of 1
val wordAndOne: DStream[(String, Int)] = stream.flatMap(_.split(" ")).map((_,1))
//6. Windowed word count via reduceByKeyAndWindow
//reduceFunc: function applied to merge counts per key across the window
//windowDuration: length of the window
//slideDuration: how often the window is evaluated
// NOTE(review): window (5s) is SHORTER than the slide (10s), so every other
// 5s batch is silently excluded from the results — arguments look swapped.
val result: DStream[(String, Int)] = wordAndOne.reduceByKeyAndWindow((x:Int,y:Int)=>x+y,Seconds(5),Seconds(10))
//7. Print each window's counts
result.print()
//8. Start the computation and block until termination
ssc.start()
ssc.awaitTermination()
}
}