package com.citic.guoan.test

import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode, SparkSession}
import org.apache.spark.sql.hive.HiveContext
import java.io.{File, Serializable}
import java.util.Properties
import com.mysql.jdbc.JDBC4MysqlSQLXML // from MySQL Connector/J; not used directly in this excerpt

object LoadDB extends Serializable {

  // Spark entry points, initialized in main() before any work is done
  private var sparkConf: SparkConf = null
  private var javaSparkContext: JavaSparkContext = null
  private var hiveContext: HiveContext = null
  private var sqlContext: SQLContext = null

  def main(args: Array[String]): Unit = {
    // Build the Spark, SQL, and Hive contexts before running the export
    initSparkContext()
    initSQLContext()
    initHiveContext()

    System.out.println(" ---------------------- start hive2db ------------------------")
    // The two command-line arguments are forwarded to hive2db unchanged
    hive2db(args(0), args(1))
    System.out.println(" ---------------------- finish hive2db ------------------------")

    // System.out.println(""" --