Writing Data to a Hive Dynamic Partition Table with Spark SQL

import org.apache.spark.{Logging, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object HiveTableHelper extends Logging {

  def hiveTableInit(sc: SparkContext): HiveContext = {
    new HiveContext(sc)
  }

  def writePartitionTable(hCtx: HiveContext, inputRdd: RDD[Row], tabName: String, colNames: String): Unit = {
    // One string column per space-separated field name, including the partition column dt
    val schema = StructType(
      colNames.split(" ").map(fieldName => StructField(fieldName, StringType, nullable = true))
    )
    // Column list for CREATE TABLE; dt is removed here because it is declared in PARTITIONED BY
    val tableCols = colNames.replace(" dt", "").split(" ").map(name => name + " String").mkString(", ")

    val df = hCtx.createDataFrame(inputRdd, schema)

    // Register the DataFrame as a temporary table so it can be queried from HiveQL
    val tempTb = "temp" + tabName
    df.registerTempTable(tempTb)

    hCtx.sql("CREATE EXTERNAL TABLE if not exists " + tabName + " (" + tableCols + ")" +
      " PARTITIONED BY (`dt` string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'" +
      " STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'" +
      " OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'" +
      " location '/spdbccc/data/dest/SPSJNEW/" + tabName + "'")

    // Dynamic partitioning: let Hive derive the dt partition value from each row's data
    hCtx.sql("set hive.exec.dynamic.partition.mode = nonstrict")
    hCtx.sql("insert overwrite table " + tabName + " partition(`dt`) select * from " + tempTb)
  }
}
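For reference, here is a minimal, hypothetical driver showing how these helpers might be called; the application name, table name, column list, and sample rows are placeholders I introduced for illustration, not part of the original post. The last value in each Row supplies the dt partition key.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.Row

object HiveTableHelperDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("HiveTableHelperDemo"))
    val hCtx = HiveTableHelper.hiveTableInit(sc)

    // Every field is a string; the trailing dt column becomes the dynamic partition value
    val colNames = "id name dt"
    val rows = sc.parallelize(Seq(
      Row("1", "alice", "20240801"),
      Row("2", "bob", "20240802")
    ))

    HiveTableHelper.writePartitionTable(hCtx, rows, "demo_table", colNames)
    sc.stop()
  }
}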