1、配置文件
package config
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
// Shared Spark configuration / session holder for all jobs.
// (`case object` kept: it is Serializable, which matters if Spark closures
// happen to capture this object.)
case object conf {
  // Local master using all available cores.
  private val master = "local[*]"

  /** Base Spark configuration: local master, application name "jobs". */
  val confs: SparkConf = new SparkConf().setMaster(master).setAppName("jobs")

  /**
   * Single SparkSession entry point. Built FIRST so only one SparkContext
   * is ever created. The original code did `new SparkContext(confs)` and
   * then `SparkSession.builder()...getOrCreate()`, which only worked
   * because getOrCreate silently reuses the pre-existing context —
   * fragile and redundant.
   */
  val spark_session: SparkSession = SparkSession.builder()
    .appName("jobs").config(confs).getOrCreate()

  // Derive the context from the session instead of constructing a second one.
  val sc: SparkContext = spark_session.sparkContext
  sc.setLogLevel("ERROR")

  // Spark 2.x rejects cartesian products by default; enable cross joins.
  spark_session.conf.set("spark.sql.crossJoin.enabled", true)
}
2、连接 MySQL 8.0，操作多表
package operationMysql
import config.conf.{sc, spark_session}
import org.apache.spark.rdd.RDD
import org