package day09
import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}
/**
* @author yangkun
* @date 2020/10/29 9:24
* @version 1.0
*/
/**
 * Demonstrates reading a MySQL table ("user" in database "test") into a
 * DataFrame with Spark SQL over JDBC. Three equivalent read styles are shown;
 * methods 1 and 2 are kept as commented references, method 3 is executed.
 */
object Spark01_SQL_MySQL {
  def main(args: Array[String]): Unit = {
    // Spark configuration: local mode using all available cores.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL01_MySQL")
    // Entry point for the DataFrame / SQL API.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    println("========= spark sql read mysql ========")

    /* Method 1: format("jdbc") with individual .option(...) calls.
    val df: DataFrame = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://hadoop100:3306/test?useSSL=false")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "123")
      .option("dbtable", "user")
      .load()
    df.show()
    */

    /* Method 2: format("jdbc") with a single .options(Map(...)) call;
       credentials embedded in the JDBC URL.
    val df: DataFrame = spark.read.format("jdbc")
      .options(
        Map(
          "url"     -> "jdbc:mysql://hadoop100:3306/test?user=root&password=123",
          "driver"  -> "com.mysql.jdbc.Driver",
          "dbtable" -> "user"
        )
      ).load()
    df.show()
    */

    // Method 3: spark.read.jdbc(url, table, props) with connection
    // credentials carried in a java.util.Properties object.
    val props: Properties = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "123")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
    // Connector/J 8.x renamed it to "com.mysql.cj.jdbc.Driver" — confirm which
    // driver jar is on the classpath before changing this.
    props.setProperty("driver", "com.mysql.jdbc.Driver")
    val df: DataFrame = spark.read.jdbc("jdbc:mysql://hadoop100:3306/test", "user", props)
    df.show()

    // Release the SparkSession and its underlying SparkContext.
    spark.stop()
  }
}
// Spark SQL: reading data from MySQL over JDBC.