Add the MySQL dependency to the pom file
Add the following dependency to the pom file:
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.38</version>
</dependency>
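If the project is built with sbt instead of Maven, the equivalent dependency (same coordinates and version as above) would be:

// build.sbt equivalent of the Maven dependency above
libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.38"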
Reading a MySQL data source
import java.util.Properties
import org.apache.spark.sql.{DataFrame, SparkSession}

object SparksqlToMysql {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkSession
    val spark: SparkSession = SparkSession.builder().appName("SparksqlToMysql").master("local[*]").getOrCreate()
    // 2. Specify the database connection URL
    val url = "jdbc:mysql://node03:3306/spark"
    // 3. Specify the table name
    val table = "student"
    // 4. Specify the user name and password
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // 5. Read the table from the database
    val studentDF: DataFrame = spark.read.jdbc(url, table, properties)
    // 6. Display the data
    studentDF.createTempView("student")
    spark.sql("select * from student").show()
    // 7. Stop the session
    spark.stop()
  }
}
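For larger tables, spark.read.jdbc also has an overload that reads in parallel by range-partitioning on a numeric column. Below is a minimal sketch; the id column and its 1..1000 value range are assumptions about the student table, not something given above:

import java.util.Properties
import org.apache.spark.sql.{DataFrame, SparkSession}

object SparksqlToMysqlPartitioned {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("SparksqlToMysqlPartitioned").master("local[*]").getOrCreate()
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // This overload splits the read into numPartitions range queries over the
    // partition column; "id", 1 and 1000 are assumed properties of the table.
    val studentDF: DataFrame = spark.read.jdbc(
      "jdbc:mysql://node03:3306/spark",
      "student",
      columnName = "id",      // numeric column to partition on (assumed to exist)
      lowerBound = 1L,        // smallest id value, used to compute the stride
      upperBound = 1000L,     // largest id value, used to compute the stride
      numPartitions = 4,      // number of parallel JDBC connections
      connectionProperties = properties)
    studentDF.show()
    spark.stop()
  }
}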
Reading a CSV file and inserting it into a MySQL table
import java.util.Properties
import org.apache.spark.sql.{DataFrame, SparkSession}

object SparksqlToMysql02 {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkSession
    val spark: SparkSession = SparkSession.builder().master("local[*]").appName("SparksqlToMysql02").getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    // 2. Load the csv file
    val courseDF: DataFrame = spark
      .read
      .format("csv")
      .option("header", "true") // treat the first row as column headers
      .load(this.getClass.getClassLoader.getResource("course.csv").getPath)
    // 3. Register the DataFrame as a temporary view
    courseDF.createTempView("course")
    // 4. Query the data
    val res = spark.sql("select * from course")
    // 5. Configure the mysql connection
    val properties = new Properties()
    val url = "jdbc:mysql://node03:3306/spark?useUnicode=true&characterEncoding=utf8"
    val table = "course"
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // 6. Write the data
    /**
     * Specifies the behavior when data or table already exists. Options include:
     * - `overwrite`: overwrite the existing data.
     * - `append`: append the data.
     * - `ignore`: ignore the operation (i.e. no-op).
     * - `error` or `errorifexists`: default option, throw an exception at runtime.
     *
     * @since 1.4.0
     */
    res.write.mode("append").jdbc(url, table, properties)
    // 7. Stop the session
    spark.stop()
  }
}
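The same write can also be expressed with the type-safe SaveMode enum instead of a string mode name, or with the generic option-style writer API. A minimal sketch, assuming the same res, url, table and properties as above:

import org.apache.spark.sql.SaveMode

// Type-safe equivalent of .mode("append"); a typo here fails at compile time
res.write.mode(SaveMode.Append).jdbc(url, table, properties)

// Option-style equivalent using the generic DataFrameWriter API
res.write
  .format("jdbc")
  .option("url", url)
  .option("dbtable", table)
  .option("user", "root")
  .option("password", "123456")
  .mode(SaveMode.Append)
  .save()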
–The End–