package SparkDemo
import java.sql.{Connection, DriverManager, PreparedStatement}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
object DStreamToMySQL {
/** State-update function for `updateStateByKey`.
  *
  * Adds the counts that arrived in the current batch to the running total
  * carried in the streaming state.
  *
  * @param newValues counts observed for a key in the current micro-batch
  *                  (may be empty when the key received no new data)
  * @param state     running total so far; `None` on the key's first appearance
  * @return the new running total, always `Some` so the key's state is retained
  */
def updateFunc(newValues: Seq[Int], state: Option[Int]): Option[Int] = {
  // Seq.sum replaces the hand-rolled foldLeft(0)(_ + _); sum of an empty Seq is 0.
  val currentCount = newValues.sum
  val previousCount = state.getOrElse(0)
  Some(currentCount + previousCount)
}
def main(args : Array[String]): Unit ={
//建立SparkStream
val conf = new SparkConf().setAppName("DStreamToMySQL")
val ssc = new StreamingContext(conf,Seconds(1))
//设置日志等级
StreamingLoggingExample.setStreamingLogLevels()
val lines = ssc.textFileStream("/tmp/yuhang.zhang/data")
val words = lines.flatMap(_.split(" "))