package com.ws.spark
import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Duration, StreamingContext}
import redis.clients.jedis.Jedis
object OrderCount {

  /**
   * Spark Streaming job: consumes order lines from Kafka, sums the price
   * (last whitespace-separated field of each line) per 5-second batch, and
   * accumulates the running total in Redis under key "TEST:TOTAL_MONEY".
   *
   * @param args unused; broker/Redis addresses are hard-coded for this demo
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("OrderCount").setMaster("local[4]")
    // 5-second micro-batch interval
    val ssc = new StreamingContext(conf, Duration(5000))

    // Consumer group name
    val group = "group1"
    // Topic to consume
    val topic = "orders"
    // Kafka broker address
    val brokerList = "192.168.0.21:9092"
    val topics: Set[String] = Set(topic)

    // Kafka consumer parameters
    val kafkaParams = Map(
      "metadata.broker.list" -> brokerList,
      "group.id" -> group,
      // Re-consume from the earliest available offset on every restart
      "auto.offset.reset" -> kafka.api.OffsetRequest.SmallestTimeString
    )

    // val (was var): the stream reference is never reassigned.
    val kafkaStream: InputDStream[(String, String)] =
      KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

    kafkaStream.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // Records are (key, value) pairs; only the value (the order line) matters.
        val lineData: RDD[String] = rdd.map(_._2)
        // Price is the last whitespace-separated field. Skip malformed lines
        // (non-numeric or empty last field) instead of crashing the batch.
        val prices: RDD[Double] = lineData
          .map(_.split(" "))
          .flatMap(fields => scala.util.Try(fields(fields.length - 1).toDouble).toOption)
        if (!prices.isEmpty()) {
          val result = prices.reduce(_ + _)
          println(result)
          // Accumulate the batch total in Redis; close the connection even if
          // the increment throws (previously leaked on failure).
          val jedis = new Jedis("192.168.0.21", 6379)
          try jedis.incrByFloat("TEST:TOTAL_MONEY", result)
          finally jedis.close()
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}