import org.apache.spark.rdd.RDD
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
object Main {
/**
 * Copies the MySQL table `play_name` (database vboxDB) into the Hive table
 * `vboxdb.play_name`, overwriting any existing table.
 *
 * @param args unused command-line arguments
 */
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("datacopy")
/*.setMaster("local")*/
val sc = new SparkContext(conf)
try {
val sqlContext = new HiveContext(sc)
val url = "jdbc:mysql://192.168.20.29:3306/vboxDB?useUnicode=true&characterEncoding=utf-8&useSSL=false"
// JDBC connection options as a single immutable Map (was a `var` grown by `+=`).
// NOTE(review): credentials are hard-coded; move them to configuration or args.
val options = Map(
  "url" -> url,
  "driver" -> "com.mysql.jdbc.Driver",
  "user" -> "root",
  "password" -> "new.1234",
  "dbtable" -> "play_name"
)
// Bug fix: the original called `reader.options(options)` and discarded the
// result, relying on DataFrameReader's internal mutation to carry the options
// into a separate `reader.load()` call. Chain the calls so the configured
// reader is unambiguously the one loaded from.
val originalDF: DataFrame = sqlContext.read.format("jdbc").options(options).load()
originalDF.show()
// Write into Hive, replacing the target table if it already exists.
originalDF.write.mode(SaveMode.Overwrite).saveAsTable("vboxdb.play_name")
} finally {
// Ensure the SparkContext is released even if the copy fails.
sc.stop()
}
}
}
注意：在 spark-shell 中，内置的 sqlContext 变量实际上是 HiveContext 的实例，而不是普通 SQLContext 的实例。