Using Dynamically Updated Broadcast Variables in Spark Streaming

package test
 
import java.sql.{Connection, DriverManager, ResultSet, Statement}
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.apache.spark.{SparkConf, SparkContext}
 
 
object test3 {
  // @volatile so a reference swapped in by update() is visible to the batch thread
  @volatile private var instance: Broadcast[Map[String, Double]] = null
  var kafkaStreams: InputDStream[ConsumerRecord[String, String]] = null
  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS")
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.INFO)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.INFO)
    Logger.getLogger("org.apache.kafka.clients.consumer").setLevel(Level.INFO)
    val conf = new SparkConf().setAppName("Spark Streaming TO ES TOPIC")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val scc = new StreamingContext(conf, Seconds(1)) // 1-second batch interval
    val topic = PropertiesScalaUtils.loadProperties("topic_combine")
    val topicSet = Set(topic) // the Kafka topic(s) to subscribe to
    val kafkaParams = Map[String, Object](
      "auto.offset.reset" -> "earliest", // or "latest"
      "key.deserializer" -> classOf[StringDeserializer], // key/value deserializers
      "value.deserializer" -> classOf[StringDeserializer],
      "bootstrap.servers" -> PropertiesScalaUtils.loadProperties("broker"),
      "group.id" -> PropertiesScalaUtils.loadProperties("groupId_es"),
      "enable.auto.commit" -> (false: java.lang.Boolean) // offsets are committed manually below
    )
    // initialize the broadcast instance before the stream starts
    getInstance(scc.sparkContext)
    kafkaStreams = KafkaUtils.createDirectStream[String, String](
      scc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topicSet, kafkaParams))
    kafkaStreams.foreachRDD(rdd => {
      // The foreachRDD body runs on the driver once per batch, which is why it
      // can call update() to rebroadcast; only the code inside foreachPartition
      // below runs on the executors.
      val current_time = sdf.format(new Date())
      val new_time = current_time.substring(14, 16).toLong // minute of the hour
      if (new_time % 5 == 0) {
        // Refresh the broadcast contents every five minutes. Note: with a
        // 1-second batch this condition holds for every batch within a matching
        // minute; see the stricter trigger sketched after the code.
        update(rdd.sparkContext, true)
      }
      if (!rdd.isEmpty()) {
        val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges // offset ranges for this batch
        // Capture the Broadcast handle on the driver so the closure ships the
        // broadcast reference itself; reading the object field `instance` inside
        // the partition closure would hit an uninitialized field on the executors.
        val bc = instance
        rdd.foreachPartition(pr => {
          pr.foreach(pair => {
            val d = pair.value()
            if (bc.value.contains(d)) {
              // your own processing logic here
            }
          })
        })
        // enable.auto.commit is false, so commit the offsets once the batch is processed
        kafkaStreams.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
      }
    })
    scc.start()
    scc.awaitTermination()
  }
 
  /**
    * Fetch the warning-price table from SQL Server into a Map.
    * @return map of url -> WarningPrice
    */
  def getSqlServerData(): Map[String, Double] = {
    val time = sdf.format(new Date())
    val enter_time = time.substring(0, 10) // today's date, for use in the query
    var map = Map[String, Double]()
    var conn: Connection = null
    var stmt: Statement = null
    var rs: ResultSet = null
    val url = ""       // JDBC connection string
    val user_name = ""
    val password = ""
    val sql = ""
    try {
      conn = DriverManager.getConnection(url, user_name, password)
      stmt = conn.createStatement
      rs = stmt.executeQuery(sql)
      while (rs.next) {
        val key = rs.getString("url")
        val warningPrice = rs.getString("WarningPrice").toDouble
        map += (key -> warningPrice)
      }
    } catch {
      case e: Exception =>
        e.printStackTrace()
        println("SQL Server connection failed: " + e)
    } finally {
      // close resources even when the query throws, not only on success
      if (rs != null) rs.close()
      if (stmt != null) stmt.close()
      if (conn != null) conn.close()
    }
    map
  }
 
  /**
    * Rebroadcast the map: unpersist the old broadcast and ship a fresh one.
    * @param sc       the SparkContext (driver side)
    * @param blocking whether unpersist blocks until the old blocks are removed
    */
  def update(sc: SparkContext, blocking: Boolean = false): Unit = {
    if (instance != null){
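      // remove the old broadcast blocks from the executors, then ship a fresh map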
      instance.unpersist(blocking)
      instance = sc.broadcast(getSqlServerData())
    }
  }
 
  /**
    * Lazily initialize the broadcast instance (double-checked locking).
    * @param sc the SparkContext
    * @return the current Broadcast handle
    */
  def getInstance(sc: SparkContext): Broadcast[Map[String,Double]] = {
    if (instance == null) {
      synchronized {
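        // second null check: another thread may have broadcast the map
        // while this one was waiting for the lock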
        if (instance == null) {
          instance = sc.broadcast(getSqlServerData())
        }
      }
    }
    instance
  }
}
 
 

The map is rebroadcast to the executors every five minutes. The broadcast variable is defined on the driver; on the executors it is read-only and cannot be modified.
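
One caveat: with a 1-second batch interval, the check new_time % 5 == 0 holds for every batch during a matching minute, so the map can be rebroadcast dozens of times in a row. A stricter driver-side trigger remembers when the last refresh happened. A minimal sketch (the lastUpdate field and maybeUpdate helper are illustrative additions, not part of the original code):

  // Driver-side timestamp of the last rebroadcast (illustrative addition).
  @volatile private var lastUpdate: Long = 0L
  private val refreshIntervalMs = 5 * 60 * 1000L // five minutes

  def maybeUpdate(sc: SparkContext): Unit = {
    val now = System.currentTimeMillis()
    if (now - lastUpdate >= refreshIntervalMs) {
      lastUpdate = now
      update(sc, blocking = true) // the same update() defined above
    }
  }

Calling maybeUpdate(rdd.sparkContext) at the top of foreachRDD then rebroadcasts at most once per five-minute window, regardless of the batch interval.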
