package Spark_SQL
import org.apache.spark.sql.SparkSession
object Spark_Read_JDBC {

  /**
   * Reads the `TB_Books` table from a MySQL database over JDBC and prints the
   * first 10 rows to stdout, then stops the Spark session.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()

    // MySQL 8.x: use the Connector/J "cj" driver and pass serverTimezone
    // explicitly in the URL, otherwise the connection fails on timezone detection.
    val url = "jdbc:mysql://master:3306/flume?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Hongkong"

    // NOTE(review): credentials are hard-coded; fine for a local demo, but move
    // them to configuration/environment variables before any shared use.
    val jdbcDf = spark.read.format("jdbc")
      .option("driver", "com.mysql.cj.jdbc.Driver")
      .option("url", url)
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "TB_Books")
      .load() // load() returns the DataFrame; previously the val captured Unit from show()

    jdbcDf.show(10)

    spark.stop()
  }
}
/*
 * pom.xml (reference) — dependency versions used by this project.
 * The XML markup below was reconstructed from the stripped snippet; the
 * property-to-artifact mapping follows the ${...} references that remained.
 *
 * <properties>
 *   <hadoop.version>2.7.4</hadoop.version>
 *   <mysql.version>8.0.11</mysql.version>
 *   <spark.version>2.3.3</spark.version>
 * </properties>
 *
 * <dependencies>
 *   <dependency>
 *     <groupId>org.apache.spark</groupId>
 *     <artifactId>spark-core_2.11</artifactId>
 *     <version>${spark.version}</version>
 *   </dependency>
 *   <dependency>
 *     <groupId>org.apache.spark</groupId>
 *     <artifactId>spark-streaming_2.11</artifactId>
 *     <version>${spark.version}</version>
 *   </dependency>
 *   <dependency>
 *     <groupId>org.apache.spark</groupId>
 *     <artifactId>spark-sql_2.11</artifactId>
 *     <version>${spark.version}</version>
 *   </dependency>
 *   <dependency>
 *     <groupId>org.apache.spark</groupId>
 *     <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
 *     <version>${spark.version}</version>
 *   </dependency>
 *   <dependency>
 *     <groupId>org.apache.hadoop</groupId>
 *     <artifactId>hadoop-client</artifactId>
 *     <version>${hadoop.version}</version>
 *   </dependency>
 *   <dependency>
 *     <groupId>mysql</groupId>
 *     <artifactId>mysql-connector-java</artifactId>
 *     <version>${mysql.version}</version>
 *   </dependency>
 * </dependencies>
 */