import java.sql.DriverManager
import org.apache.kafka.common.TopicPartition
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.kafka010.OffsetRange
import scala.collection.mutable
object DataToMysql {
/**
 * Opens a NEW JDBC connection to the local `test` MySQL database.
 *
 * Each call creates a fresh connection; the caller owns it and is
 * responsible for closing it (see usage in `saveToMysql`, where one
 * connection is created per partition on the executor).
 *
 * NOTE(review): credentials and the JDBC URL are hard-coded — move them
 * to configuration before production use.
 *
 * @return a newly opened [[java.sql.Connection]]
 */
def getconn: java.sql.Connection = {
DriverManager.getConnection(
"jdbc:mysql://localhost:3306/test",
"root",
"1234"
)
}
// Save the results to the MySQL database
// Persists (word, count) pairs to MySQL. `foreachPartition` is used so the
// JDBC connection is created once per partition on the executor side
// (java.sql.Connection is not serializable and cannot be shipped from the driver).
def saveToMysql(result1: RDD[(String, Int)])={
result1.foreachPartition(filter=>{
// One connection per partition, opened on the executor.
val conn=getconn
// Iterate over the partition and store each record.
filter.
// NOTE(review): SOURCE is truncated at this point — the per-record
// insert statement and the matching conn.close() (ideally in a
// try/finally) are missing. Recover the original implementation
// before compiling; this block is incomplete as shown.
// NOTE(review): the two lines below are non-code residue from the article this
// file was pasted from (a caption reading "MyUtil: save data to MySQL" and a
// markdown image link). They are preserved here as comments so the file's
// provenance is clear; they must not be compiled as Scala.
// MyUtil:保存数据到Mysql
// ![](https://img-home.csdnimg.cn/images/20240711042549.png)