Writing data into a Hive dynamic partition table with SparkSQL

import org.apache.spark.{Logging, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object HiveTableHelper extends Logging {

  def hiveTableInit(sc: SparkContext): HiveContext = {
    new HiveContext(sc)
  }

  def writePartitionTable(hCtx: HiveContext, inputRdd: RDD[Row], tabName: String, colNames: String): Unit = {
    // Build the schema from the space-separated column names; every column is treated as a string
    val schema = StructType(
      colNames.split(" ").map(fieldName => StructField(fieldName, StringType, nullable = true))
    )
    // Column definitions for the CREATE TABLE statement, excluding the partition column `dt`
    val tableCols = colNames.replace(" dt", "").split(" ").map(name => name + " String").mkString(", ")
    val df = hCtx.createDataFrame(inputRdd, schema)
    logInfo("---------- begin write table " + tabName + " ----------")

    // Register the DataFrame as a temporary table so it can be referenced from SQL
    val tempTb = "temp" + tabName
    df.registerTempTable(tempTb)

    // Create the external, dt-partitioned SequenceFile table if it does not exist yet
    hCtx.sql("CREATE EXTERNAL TABLE IF NOT EXISTS " + tabName + " (" + tableCols + ") " +
      "PARTITIONED BY (`dt` string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' " +
      "STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' " +
      "OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat' " +
      "LOCATION '/spdbccc/data/dest/SPSJNEW/" + tabName + "'")

    // Dynamic partitioning requires nonstrict mode so the partition value can come from the data itself
    hCtx.sql("set hive.exec.dynamic.partition.mode = nonstrict")
    hCtx.sql("insert overwrite table " + tabName + " partition(`dt`) select * from " + tempTb)
  }
}
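A minimal usage sketch follows. The application name, input path, table name, and the parsing logic (pipe-delimited text whose last field is the partition date dt) are assumptions for illustration only, not part of the original code.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.Row

object HiveTableHelperDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("HiveTableHelperDemo"))
    val hCtx = HiveTableHelper.hiveTableInit(sc)

    // Hypothetical input: pipe-delimited lines, last field is the partition date (dt)
    val colNames = "id name dt"
    val rowRdd = sc.textFile("/tmp/demo_input")   // assumed path
      .map(_.split("\\|"))
      .map(fields => Row(fields: _*))

    HiveTableHelper.writePartitionTable(hCtx, rowRdd, "demo_table", colNames)
    sc.stop()
  }
}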