package cn.xjw
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
//实体类
case class MY_TEST_DB(val id:Int,val name:String,password:String)
object DB {
  /** Demo entry point: reads a MySQL table through Spark's JDBC data source,
    * prints its contents and schema, runs a simple filtered select, then
    * writes the result back out over JDBC.
    *
    * Requires the MySQL JDBC driver JAR on the classpath.
    */
  def main(args: Array[String]): Unit = {
    // Never reassigned, so `val` (the originals were `var`s for no reason).
    val conf = new SparkConf().setMaster("local")
      .setAppName("数据库连接测试")
    val spark = SparkSession.builder()
      .config(conf)
      .getOrCreate()
    try {
      // Single source of truth for the JDBC URL (was duplicated in read and write).
      val url = "jdbc:mysql://localhost:3306/my_test_db?useUnicode=true&characterEncoding=utf-8&serverTimezone=GMT%2B8&useSSL=false"

      // JDBC connection properties.
      val props = new java.util.Properties
      props.put("user", "root")
      props.put("password", "123")
      // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
      // Connector/J 8+ expects "com.mysql.cj.jdbc.Driver" — confirm which JAR is used.
      props.put("driver", "com.mysql.jdbc.Driver")

      // Read the database — style 1: url/table/properties overload.
      val dataFrame1 = spark.read
        .jdbc(url, "my_test_table", props)
      dataFrame1.show()

      // Read the database — style 2 (equivalent, option-based):
      // val dataFrame2 = spark.read
      //   .format("jdbc")
      //   .option("driver", "com.mysql.jdbc.Driver")
      //   .option("url", url)
      //   .option("dbtable", "my_test_table")
      //   .option("user", "root")
      //   .option("password", "123")
      //   .load()

      // Inspect the table structure.
      dataFrame1.printSchema()
      // Print the schema object (column names and data types).
      println(dataFrame1.schema)

      // Query rows where id > 1.
      dataFrame1.select("id", "name", "password").where("id>1").show()

      // Write to the database. BUG FIX: the original overwrote "my_test_table"
      // — the very table the (lazy) DataFrame reads from. With SaveMode.Overwrite
      // Spark drops/truncates the JDBC target BEFORE executing the read plan,
      // which destroys the source data. Write to a separate table instead.
      dataFrame1.write
        .mode(SaveMode.Overwrite) // Overwrite: replace table; Append: add rows; ...
        .jdbc(url, "my_test_table_copy", props)
    } finally {
      // Release the SparkSession (stops the underlying SparkContext) —
      // the original never stopped it.
      spark.stop()
    }
  }
}
// NOTE: the MySQL JDBC driver JAR must be added to the classpath