// (blog note, translated) Without further ado — straight to the code.
package com.hr.utils

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.text.SimpleDateFormat

import scala.collection.mutable.ListBuffer

import org.apache.spark
import org.apache.spark.{SparkConf, SparkContext, rdd}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.functions._

import com.hr.utils.DataSourceUtil._

/**
 * Utility object for obtaining and releasing MySQL JDBC connections,
 * used when writing Spark DataFrame partitions to MySQL.
 *
 * Connection parameters (`real_jdbcDriver`, `real_jdbcUrl`, `real_jdbcUser`,
 * `real_jdbcPassword`) come from [[DataSourceUtil]].
 *
 * HF, 2020-06-22 23:55
 */
object MysqlSink_all {

  // Register the JDBC driver once, when the object is first referenced.
  Class.forName(real_jdbcDriver)

  /**
   * Open a new JDBC connection using the configured URL and credentials.
   *
   * NOTE(review): returns a fresh connection per call (no pooling) — callers
   * must release it via [[release]].
   */
  def getConnection(): Connection =
    DriverManager.getConnection(real_jdbcUrl, real_jdbcUser, real_jdbcPassword)

  /**
   * Close the prepared statement and then the connection, logging (but not
   * propagating) any close failure so cleanup of one resource cannot skip
   * the other.
   *
   * @param connection connection to close; may be null
   * @param pstmt      statement to close; may be null
   */
  def release(connection: Connection, pstmt: PreparedStatement): Unit = {
    try {
      if (pstmt != null) {
        pstmt.close()
      }
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // TODO(review): the source text was truncated mid-`finally` by the
      // scrape; this is the standard JDBC cleanup tail — confirm against the
      // original file.
      try {
        if (connection != null) {
          connection.close()
        }
      } catch {
        case e: Exception => e.printStackTrace()
      }
    }
  }
}
Spark DataFrame 分区写入 MySQL 的工具类
最新推荐文章于 2021-05-24 20:53:23 发布