// Spark: loading and storing MySQL data
import java.util.Properties
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.SparkConf
object MysqlTest {

  /** Demonstrates the three ways to read a MySQL table through Spark SQL's JDBC
    * data source, and the two ways to append rows back to it:
    *   reads:  per-key `.option(...)`, an options `Map`, and `DataFrameReader.jdbc`;
    *   writes: `DataFrameWriter` with explicit options, and `DataFrameWriter.jdbc`.
    * Runs locally (`local[*]`) against `spark_sql.test1` on localhost:3306.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MysqlTest").setMaster("local[*]")
    val session = SparkSession.builder().config(conf).getOrCreate()

    // Shared connection settings — defined once instead of being repeated (and
    // risking drift) across five call sites as in the original.
    // NOTE(review): credentials are hard-coded; externalize them for real use.
    val url      = "jdbc:mysql://localhost:3306/spark_sql"
    val driver   = "com.mysql.jdbc.Driver"
    val user     = "root"
    val password = "000000"
    val table    = "test1"

    // --- Read, variant 1: individual .option() calls ---
    session.read.format("jdbc")
      .option("url", url)
      .option("driver", driver)
      .option("user", user)
      .option("password", password)
      .option("dbtable", table)
      .load().show()

    // --- Read, variant 2: options Map, credentials embedded in the URL ---
    session.read.format("jdbc")
      .options(Map(
        "url"     -> s"$url?user=$user&password=$password",
        "dbtable" -> table,
        "driver"  -> driver))
      .load().show()

    // --- Read, variant 3: jdbc() convenience method with a Properties object ---
    val properties = new Properties()
    properties.setProperty("user", user)
    properties.setProperty("password", password)
    session.read.jdbc(url, table, properties).show()

    // Build a small Dataset to write back. The encoder for `test1` comes from
    // the implicits of this session.
    import session.implicits._
    val dataDS = session.sparkContext
      .makeRDD(List(test1(1, "李四", 20), test1(2, "lisi", 20)))
      .toDS()

    // --- Write, variant 1: DataFrameWriter with explicit options, appending ---
    dataDS.write
      .format("jdbc")
      .option("url", url)
      .option("user", user)
      .option("password", password)
      .option("dbtable", table)
      .mode(SaveMode.Append)
      .save()

    // --- Write, variant 2: jdbc() convenience method. Reuses `properties`;
    // the original built an identical second Properties object. ---
    dataDS.write.mode(SaveMode.Append).jdbc(url, table, properties)

    // Bug fix: the original called context.stop() AFTER session.close().
    // Closing the session already stops the underlying SparkContext, so the
    // extra stop() was redundant and acted on an already-stopped context.
    session.close()
  }

  /** Row type mapped onto MySQL table `test1`.
    * (Lower-case name kept for source compatibility; Scala convention would
    * be UpperCamelCase.)
    */
  final case class test1(id: Int, name: String, age: Int)
}