引入依赖（在 pom.xml 中添加以下 Maven 依赖：MySQL JDBC 驱动与 DBCP 连接池）
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.38</version>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
</dependency>
使用 Java 编写一个简单的数据库连接池类，供 Spark Streaming 各分区复用 JDBC 连接
package cn.itcast.spark.day7;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.LinkedList;
public class ConnectionPool {
private static LinkedList<Connection> connectionQueue;
static {
try {
Class.forName("com.mysql.jdbc.Driver");
}catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
public synchronized static Connection getConnection() {
try {
if (connectionQueue == null) {
connectionQueue = new LinkedList<Connection>();
for (int i = 0;i < 5;i ++) {
Connection conn = DriverManager.getConnection(
"jdbc:mysql://localhost:3306/test?characterEncoding=utf8&useSSL=true",
"root",
"root"
);
connectionQueue.push(conn);
}
}
}catch (Exception e) {
e.printStackTrace();
}
return connectionQueue.poll();
}
public static void returnConnection(Connection conn) {
connectionQueue.push(conn);
}
}
Spark Streaming 代码（从 socket 读取单词流，做 word count 后写入 MySQL）
package cn.itcast.spark.day7
import org.apache.spark.{SparkConf, TaskContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
/**
 * Word-count over a socket text stream, persisted to MySQL per micro-batch.
 *
 * Fixes over the original:
 * - `rdd.isEmpty!` was a syntax error; the intended guard is `!rdd.isEmpty()`;
 * - one Statement was created PER RECORD and never closed (resource leak) —
 *   now a single PreparedStatement per partition, closed in `finally`;
 * - SQL was built by string concatenation (injection/escaping hazard) —
 *   now parameterized;
 * - the connection is returned to the pool even if an insert throws;
 * - a null connection (pool exhausted / DB down) no longer NPEs.
 */
object sqlTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("w")
    val ssc = new StreamingContext(conf, Seconds(5))
    val lines = ssc.socketTextStream("101.132.122.75", 9999)
    val words = lines.flatMap(_.split(" "))
    val wordcount = words.map(x => (x, 1)).reduceByKey(_ + _)
    wordcount.foreachRDD { rdd =>
      // Only touch the database when this batch actually produced data.
      if (!rdd.isEmpty()) {
        rdd.foreachPartition { partition =>
          // Runs on the executor: one pooled connection per partition.
          val conn = ConnectionPool.getConnection()
          if (conn != null) {
            // Parameterized insert, reused for every record in the partition.
            val stmt = conn.prepareStatement(
              "insert into streaming(item,count) values(?,?)")
            try {
              partition.foreach { case (item, count) =>
                stmt.setString(1, item)
                stmt.setInt(2, count)
                stmt.executeUpdate()
              }
            } finally {
              // Always release JDBC resources, even on failure mid-partition.
              stmt.close()
              ConnectionPool.returnConnection(conn)
            }
          }
        }
      }
    }
    ssc.start()
    ssc.awaitTermination()
  }
}