Reading data from MySQL
// Read the `student` table from the MySQL database `spark` over JDBC.
// NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x class;
// Connector/J 8+ renames it to `com.mysql.cj.jdbc.Driver` — confirm which jar is on the classpath.
val jdbcDF = spark.read
  .format("jdbc")
  .option("url", "jdbc:mysql://localhost:3306/spark")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("dbtable", "student")
  .option("user", "hive")
  .option("password", "hive")
  .load()

// Inspect the loaded rows and the inferred schema.
jdbcDF.show
jdbcDF.printSchema
Writing the dataset back to MySQL
import java.util.Properties
// `SaveMode` was referenced but never imported; required outside spark-shell.
import org.apache.spark.sql.SaveMode

// Connection properties for the JDBC sink.
// Fix: the original fused `val prop = new Properties()` and the first `put`
// onto one line (`...Properties()prop.put(...)`), which does not compile.
val prop = new Properties()
prop.put("user", "hive")
prop.put("password", "hive")
prop.put("driver", "com.mysql.jdbc.Driver")

// Append the DataFrame's rows to table `student` in database `spark` on node3.
// NOTE(review): the read above targets localhost while this write targets node3 —
// confirm the host difference is intentional.
jdbcDF.write.
  mode(SaveMode.Append).
  jdbc("jdbc:mysql://node3:3306/spark", "spark.student", prop)