package com.imooc.spark
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
// Stateful streaming word count: accumulates per-word counts across micro-batches.
object stateful {

  /**
   * Entry point. Reads whitespace-separated words from a TCP socket source and
   * prints a running (stateful) word count every 5-second batch.
   *
   * Host and port may be supplied as `args(0)` and `args(1)`; when absent,
   * the original hard-coded defaults are used, so existing invocations are unaffected.
   */
  def main(args: Array[String]): Unit = {
    // Generalized: socket source is configurable, defaulting to the original values.
    val host = args.lift(0).getOrElse("192.168.43.222")
    val port = args.lift(1).map(_.toInt).getOrElse(9876)

    val conf = new SparkConf().setAppName("stateful").setMaster("local[6]")
    val ssc = new StreamingContext(conf, Seconds(5))
    // updateStateByKey requires a checkpoint directory to persist state between batches.
    ssc.checkpoint(".")
    // Connect to the socket text stream.
    val lines = ssc.socketTextStream(host, port)
    // Tokenize each line and pair every word with an initial count of 1.
    val value = lines.flatMap(_.split(" ")).map(word => (word, 1))
    // Merge this batch's counts into the accumulated state for each key.
    val result = value.updateStateByKey(updateFunction _)
    result.print()
    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * State update function: sums the new counts seen for a key in the current
   * batch and adds them to the previously accumulated total.
   *
   * @param currentvalue counts observed for this key in the current batch
   * @param prevalue     previously accumulated total for this key, if any
   * @return the updated running total; always `Some`, so state for a key is never dropped
   */
  def updateFunction(currentvalue: Seq[Int], prevalue: Option[Int]): Option[Int] =
    Some(currentvalue.sum + prevalue.getOrElse(0))
}
// sparkstreaming -- stateful word counting via updateStateByKey
// (latest recommended article published 2022-08-19 16:02:01)