// Insert DataFrame rows into MySQL using Spark's foreachPartition.
// Adapted from: http://www.waitingfy.com/archives/4370
import java.sql.{Connection, DriverManager, PreparedStatement}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import scala.collection.mutable.ListBuffer
object foreachPartitionTest {
case class TopSongAuthor(songAuthor:String, songCount:Long)
/** Opens a new JDBC connection to the MySQL database.
  *
  * The URL is now a parameter with the original value as its default, so
  * existing callers (`getConnection()`) are unaffected while tests or other
  * environments can supply their own URL.
  *
  * NOTE(review): credentials are embedded in the default URL — consider
  * moving them to configuration. Caller is responsible for closing the
  * returned connection (see `release`).
  *
  * @param url full JDBC URL including user/password query parameters
  * @return a newly opened [[java.sql.Connection]]
  * @throws java.sql.SQLException if the connection cannot be established
  */
def getConnection(
    url: String = "jdbc:mysql://localhost:3306/baidusong?user=root&password=root&useUnicode=true&characterEncoding=UTF-8"
): Connection = {
  // Modern JDBC drivers self-register via ServiceLoader; no Class.forName needed.
  DriverManager.getConnection(url)
}
def release(connection: Connection, pstmt: PreparedStatement): Unit = {
try {
if (pstmt != null) {
pstmt.