// Build a SparkSession with Hive support; local[*] runs on all local cores
// (replace with a cluster master URL outside of local testing).
SparkSession scs = SparkSession.builder()
        .appName("getGbaseWriteHiveJob")
        .master("local[*]")
        .enableHiveSupport()
        .getOrCreate();
JavaSparkContext sc = new JavaSparkContext(scs.sparkContext());
try {
    // Constant-first equals avoids an NPE if the config value is missing.
    if ("yes".equals(fields.getDemo1_date_state())) {
        // Fetch the day's rows from GBase via the MyBatis mapper; wrap in
        // Optional so a null result is handled explicitly instead of throwing.
        Optional<List<Demo1>> optional = Optional.ofNullable(demo1Mapper.getDemo1All_Date(datadate_t2));
        if (optional.isPresent()) {
            log.info("..... starting load into table: " + fields.getDemo1_hive_tb());
            List<Demo1> listDemo1 = optional.get();
            // Spread the fetched rows over the configured number of RDD partitions.
            JavaRDD<Demo1> demo1 = sc.parallelize(listDemo1, Integer.parseInt(fields.getDemo1_rdd_prnum()));
            // The schema is inferred from Demo1 by reflection, so Demo1 must be a JavaBean.
            Dataset<Row> dataFrame = scs.createDataFrame(demo1, Demo1.class);
            dataFrame.createOrReplaceTempView(fields.getDemo1_dfview_name());
            // Switch to the target Hive database, then insert into the dt partition.
            scs.sql(fields.getHive_db());
            scs.sql(fields.getDemo1_hive_insert_tb_sql() + "(dt='" + datadate_t2 + "') " + fields.getDemo1_dfview_find_sql());
            log.info("..... load into table: " + fields.getDemo1_hive_tb() + ", partition dt=" + datadate_t2 + " succeeded .....");
            list.add("..... load into table: " + fields.getDemo1_hive_tb() + ", partition dt=" + datadate_t2 + " succeeded .....");
        } else {
            log.info("..... load into table: " + fields.getDemo1_hive_tb() + ", partition dt=" + datadate_t2 + " failed: collection fetched from GBase is null .....");
        }
    }
} catch (Exception e) {
    // Log at error level and keep the stack trace instead of only the message.
    log.error("..... load into table: " + fields.getDemo1_hive_tb() + ", partition dt=" + datadate_t2 + " failed: " + e.getMessage(), e);
}
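
// --- Hedged additions below: everything in this tail is illustrative and not
// --- part of the original job; names and values are assumptions. ---

// Release Spark resources once the load attempt finishes; in a local[*] run
// the context otherwise stays alive until the JVM exits.
sc.close();
scs.stop();

// For reference, createDataFrame(demo1, Demo1.class) infers the schema by
// reflection, so Demo1 must be a serializable JavaBean with a no-arg
// constructor and getters/setters. A minimal sketch (assumed fields):
//
//   public class Demo1 implements java.io.Serializable {
//       private String id;
//       private String name;
//       public Demo1() {}
//       public String getId() { return id; }
//       public void setId(String id) { this.id = id; }
//       public String getName() { return name; }
//       public void setName(String name) { this.name = name; }
//   }
//
// Example (assumed) config values that would make the composed statement valid
// against a Hive table partitioned by dt:
//   getHive_db()                  -> "use demo_db"
//   getDemo1_dfview_name()        -> "demo1_view"
//   getDemo1_hive_insert_tb_sql() -> "insert overwrite table demo1_hive partition"
//   getDemo1_dfview_find_sql()    -> "select id, name from demo1_view"
// Composed SQL executed by scs.sql(...):
//   insert overwrite table demo1_hive partition(dt='20240101') select id, name from demo1_view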