package Mysql
import java.util.Properties
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}
object SparkSQL1 {

  /** Entry point: reads the `CDS` table from a MySQL database over JDBC
    * and prints its contents to stdout.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName(s"${this.getClass.getSimpleName}")
      .setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new HiveContext(sc)

      // JDBC connection credentials.
      // NOTE(review): credentials are hard-coded for this demo; in production
      // they should come from configuration or the environment, not source code.
      val properties = new Properties()
      properties.put("user", "root")
      properties.put("password", "root")

      val url = "jdbc:mysql://hadoop02:3306/hivedb"

      // Load the "CDS" table as a DataFrame and print it.
      val df: DataFrame = sqlContext.read.jdbc(url, "CDS", properties)
      df.show()
    } finally {
      // Fix: the original never stopped the SparkContext, leaking the
      // context and its resources on exit. Always stop it, even on failure.
      sc.stop()
    }
  }
}
第二种 (Second approach):
package Mysql
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spa