// DataFrame保存到mysql (save a DataFrame to MySQL)
import java.util.Properties
import cn.doit.sparksql.day01.utils.SparkUtils
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
/**
 * @description: DataFrame保存到mysql — demonstrates saving a DataFrame to MySQL via JDBC
 **/
object DFSaveMysql {
def main(args: Array[String]): Unit = {
val spark: SparkSession = SparkUtils.getSparkSession()
import spark.implicits._
// 获得一个DF
val frame: DataFrame = spark.read
.options(Map("header" -> "true", "inferSchema" -> "true"))
.csv("doc/stu2.csv")
frame.printSchema()
frame.show()
val pro = new Properties()
pro.setProperty("user", "root")
pro.setProperty("password", "123456")
// NOTE: with the default SaveMode.ErrorIfExists, the write fails with
// "Table or view 'people' already exists" if the target table is present;
// use SaveMode.Append or SaveMode.Overwrite to avoid this.