import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
object WindowTestHotWorld {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName(this.getClass.getSimpleName)
    // Batch interval: 5 seconds
    val ssc = new StreamingContext(conf, Seconds(5))

    // Receive text lines from a socket source
    val inputStream = ssc.socketTextStream("b02master", 1415)

    // Use the first comma-separated field of each line as the key and count it
    // over a 60-second window that slides every 20 seconds
    val windowData = inputStream
      .map(line => line.split(",", -1)(0))
      .filter(_.nonEmpty)
      .map((_, 1))
      .reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(60), Seconds(20))

    // For each window, keep only the 3 hottest keys (sort by count in descending order)
    windowData.transform(rdd => {
      val top3 = rdd.sortBy(_._2, ascending = false).take(3)
      rdd.sparkContext.makeRDD(top3)
    }).print()

    ssc.start()
    ssc.awaitTermination()
  }
}
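To feed test input into the socket source, running something like nc -lk 1415 on the b02master host works; the program expects comma-separated lines and counts the first field of each line.

As an optional variant (not in the original code): since the window (60 s) is much longer than the slide interval (20 s), Spark Streaming's incremental form of reduceByKeyAndWindow, which also takes an inverse reduce function, can avoid recomputing the whole window on every slide. Below is a minimal sketch; the object name WindowTestHotWordIncremental and the ./checkpoint directory are illustrative assumptions, and checkpointing is required by this form of the API.

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object WindowTestHotWordIncremental {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName(this.getClass.getSimpleName)
    val ssc = new StreamingContext(conf, Seconds(5))
    // The inverse-function form of reduceByKeyAndWindow requires checkpointing;
    // "./checkpoint" is an assumed local directory, not taken from the original post
    ssc.checkpoint("./checkpoint")

    val counts = ssc.socketTextStream("b02master", 1415)
      .map(_.split(",", -1)(0))
      .filter(_.nonEmpty)
      .map((_, 1))
      // Add counts entering the window and subtract counts leaving it,
      // instead of re-reducing the full 60-second window each time
      .reduceByKeyAndWindow(
        (x: Int, y: Int) => x + y,
        (x: Int, y: Int) => x - y,
        Seconds(60),
        Seconds(20))

    counts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}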