package example3
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, InputDStream, ReceiverInputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.{Duration, Seconds, StreamingContext, StreamingContextState}
import redis.clients.jedis.{Jedis, JedisPoolConfig}
import scala.collection.mutable
import scala.util.Random
object HelloStreaming11 {

  /**
   * Word-count Spark Streaming demo that reads lines from a TCP socket and
   * supports externally-triggered graceful shutdown: a background thread polls
   * a Redis key ("stopSpark") every 5 seconds and stops the StreamingContext
   * once the key exists (any non-null value is treated as a stop request).
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("MyConsumer")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // Stateful operations require a checkpoint directory to be configured.
    ssc.checkpoint("checkPoint")

    // NOTE(review): "xx.xx.xx.xx" is a placeholder host — replace before running.
    val socketData: ReceiverInputDStream[String] = ssc.socketTextStream("xx.xx.xx.xx", 9999)
    val wordTuple: DStream[(String, Int)] = socketData.map((_, 1))
    val result: DStream[(String, Int)] = wordTuple.reduceByKey(_ + _)

    // Driver-side print of each micro-batch. Fine for a demo, but collect()
    // pulls the whole RDD to the driver — do not do this with large batches.
    result.foreachRDD { rdd =>
      rdd.collect().foreach(println)
    }

    ssc.start()

    // Shutdown monitor: poll Redis every 5 s; when the "stopSpark" key appears,
    // stop the streaming context gracefully (drain in-flight batches first).
    new Thread(new Runnable {
      override def run(): Unit = {
        val redisHost = "localhost"
        val redisPort = 6379
        val jedis = new Jedis(redisHost, redisPort)
        try {
          var stopRequested = false
          while (!stopRequested) {
            // jedis.get returns null when the key is absent; wrap it in Option
            // so the null never escapes. Any existing value requests a stop.
            stopRequested = Option(jedis.get("stopSpark")).isDefined
            if (stopRequested && ssc.getState() == StreamingContextState.ACTIVE) {
              println("ssc will stop......")
              // Named booleans for clarity: also stop the SparkContext, and
              // finish processing already-received data before shutting down.
              ssc.stop(stopSparkContext = true, stopGracefully = true)
            }
            Thread.sleep(5000)
          }
        } finally {
          // Fix: the original leaked the Redis connection — always close it.
          jedis.close()
        }
      }
    }).start()

    ssc.awaitTermination()
  }
}
// Spark Streaming: controlling job shutdown via Redis
// (latest recommended article published 2024-10-02 18:26:26)