Add the pom dependencies
<!-- https://mvnrepository.com/artifact/com.pivotal/greenplum-jdbc -->
<dependency>
    <groupId>com.pivotal</groupId>
    <artifactId>greenplum-jdbc</artifactId>
    <version>5.1.4</version>
</dependency>
<!-- https://mvnrepository.com/artifact/mysql/mysql-connector-java -->
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.47</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.oracle/ojdbc6 -->
<dependency>
    <groupId>com.oracle</groupId>
    <artifactId>ojdbc6</artifactId>
    <version>11.2.0.3</version>
</dependency>
<dependency>
    <groupId>org.postgresql</groupId>
    <artifactId>postgresql</artifactId>
    <version>9.4-1201-jdbc41</version>
</dependency>
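Before wiring these into Spark, it can help to confirm the driver jars actually resolve on the classpath; a minimal sanity check, using the driver class names listed in the utility class below:

// Minimal classpath sanity check: Class.forName throws ClassNotFoundException
// if the corresponding driver jar did not resolve.
object DriverCheck {
  def main(args: Array[String]): Unit = {
    Seq(
      "com.mysql.jdbc.Driver",
      "oracle.jdbc.driver.OracleDriver",
      "com.pivotal.jdbc.GreenplumDriver",
      "org.postgresql.Driver"
    ).foreach { cls =>
      Class.forName(cls)
      println(s"$cls loaded OK")
    }
  }
}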
Write the utility classes
package com.spark.ds.jdbc
import org.apache.spark.sql.{DataFrame, SparkSession}
//MysqlDriver:"com.mysql.jdbc.Driver"
//OracleDriver:"oracle.jdbc.driver.OracleDriver"
//GreenplumDriver:"com.pivotal.jdbc.GreenplumDriver"
//postgresqlDriver:"org.postgresql.Driver"
//MysqlURL:"jdbc:mysql://localhost:3306/databaseName"
//OracleURL:"jdbc:oracle:thin:@localhost:1521:databaseName"
//GreenplumURL:"jdbc:pivotal:greenplum://localhost:15432;DatabaseName=databaseName"
//postgresqlURL:"jdbc:postgresql://localhost:5432/databaseName"
//saveMode
// - `overwrite`: overwrite the existing data.
// - `append`: append the data.
// - `ignore`: ignore the operation (i.e. no-op).
// - `error` or `errorifexists`: default option, throw an exception at runtime.
object JDBCSource {
  // Load a JDBC table into a DataFrame using the supplied connection parameters
  def createDF(session: SparkSession, param: Map[String, AnyRef]): DataFrame = {
    session.read
      .format("jdbc")
      .option("driver", param("driver").toString)
      .option("url", param("url").toString)
      .option("dbtable", param("dbtable").toString)
      .option("user", param("user").toString)
      .option("password", param("password").toString)
      .load()
  }
}
object JDBCSink {
  // Write a DataFrame to a JDBC table; saveMode is one of the modes listed above
  def save(df: DataFrame, saveMode: String, param: Map[String, AnyRef]): Unit = {
    df.write
      .mode(saveMode)
      .format("jdbc")
      .option("driver", param("driver").toString)
      .option("url", param("url").toString)
      .option("dbtable", param("dbtable").toString)
      .option("user", param("user").toString)
      .option("password", param("password").toString)
      .save()
  }
}
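For large tables, Spark's JDBC source also supports parallel reads through its standard partitioning options (partitionColumn, lowerBound, upperBound, numPartitions). A sketch of a variant reader, assuming the extra keys are added to `param`; the example values in the comments are assumptions, not part of the utility class above:

import org.apache.spark.sql.{DataFrame, SparkSession}

// Sketch only: parallel JDBC read via Spark's built-in partitioning options.
object JDBCPartitionedSource {
  def createDF(session: SparkSession, param: Map[String, AnyRef]): DataFrame = {
    session.read
      .format("jdbc")
      .option("driver", param("driver").toString)
      .option("url", param("url").toString)
      .option("dbtable", param("dbtable").toString)
      .option("user", param("user").toString)
      .option("password", param("password").toString)
      .option("partitionColumn", param("partitionColumn").toString) // numeric column, e.g. "id"
      .option("lowerBound", param("lowerBound").toString)           // e.g. "1"
      .option("upperBound", param("upperBound").toString)           // e.g. "1000000"
      .option("numPartitions", param("numPartitions").toString)     // e.g. "8"
      .load()
  }
}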
Test class
package test

import com.spark.ds.jdbc.{JDBCSink, JDBCSource}
import org.apache.spark.sql.SparkSession

object JdbcTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Greenplum_test")
      .master("local[*]")
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    // MySQL / Oracle / PostgreSQL / Greenplum
    // MySQL is used as the example; for Oracle, PostgreSQL and Greenplum only the corresponding parameters change
    // Read data from the MySQL database and load it into a DataFrame
    val mysql_read = Map(
      "driver" -> "com.mysql.jdbc.Driver",
      "url" -> "jdbc:mysql://172.16.13.185:3306/test",
      "dbtable" -> "test.write1",
      "user" -> "user123",
      "password" -> "user123"
    )
    val view1 = JDBCSource.createDF(spark, mysql_read)
    view1.show()
    // Write the result back to MySQL with Spark
    val mysql_write = Map(
      "driver" -> "com.mysql.jdbc.Driver",
      "url" -> "jdbc:mysql://172.16.13.185:3306/test",
      "dbtable" -> "test.write2",
      "user" -> "user123",
      "password" -> "user123"
    )
    JDBCSink.save(view1, "append", mysql_write)
    spark.stop()
}
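Since only the parameter map changes between databases, switching to Greenplum, for example, just means swapping in the driver and URL format from the comments in the utility class. A fragment that would replace the MySQL maps inside main; the host, port, database, table and credentials below are placeholders:

    // Placeholder connection values; substitute your own.
    val gp_read = Map(
      "driver" -> "com.pivotal.jdbc.GreenplumDriver",
      "url" -> "jdbc:pivotal:greenplum://localhost:15432;DatabaseName=test",
      "dbtable" -> "public.write1",
      "user" -> "gpadmin",
      "password" -> "gpadmin"
    )
    val gpView = JDBCSource.createDF(spark, gp_read)
    gpView.show()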