Configuring a Hive Data Source in Java: Integrating hive-jdbc

The DAO below wraps Hive access behind hive-jdbc. It is a double-checked-locking singleton that pulls a pooled DataSource and a Spring JdbcTemplate from HiveConfig, and exposes helpers for reading column metadata, listing tables, loading data files into date partitions, and altering table schemas.

import com.didichuxing.fe.offline.config.HiveConfig;
import com.didichuxing.fe.offline.entity.TableInfo;
import com.didichuxing.fe.offline.util.DateUtil;
import org.apache.tomcat.jdbc.pool.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class HiveJdbcDao {

    private static final Logger logger = LoggerFactory.getLogger(HiveJdbcDao.class);

    // volatile is required for safe double-checked locking
    private static volatile HiveJdbcDao hiveJdbcDao = null;

    private final DataSource jdbcDataSource = HiveConfig.getInstance().getDataSource();
    private final JdbcTemplate hiveJdbcTemplate = HiveConfig.getInstance().getJdbcTemplate();

    private HiveJdbcDao() {
    }

    public static HiveJdbcDao getInstance() {
        if (hiveJdbcDao == null) {
            // Lock on this class itself, not on an unrelated DAO class
            synchronized (HiveJdbcDao.class) {
                if (hiveJdbcDao == null) {
                    hiveJdbcDao = new HiveJdbcDao();
                }
            }
        }
        return hiveJdbcDao;
    }

    /**
     * Query the column names, types and comments of a Hive table.
     *
     * @param abstractSql a statement such as "describe <table>"
     * @return one TableInfo per column
     */
    public List<TableInfo> selectTableInfoFromHive(String abstractSql) {
        List<TableInfo> tableInfoList = new ArrayList<>();
        logger.info("Running sql: " + abstractSql);
        // try-with-resources closes the connection, statement and result set
        try (Connection conn = jdbcDataSource.getConnection();
             Statement statement = conn.createStatement();
             ResultSet res = statement.executeQuery(abstractSql)) {
            while (res.next()) {
                // Create a new TableInfo per row; reusing a single instance
                // would leave the list full of references to the last row.
                TableInfo tableInfo = new TableInfo();
                tableInfo.setColumnName(res.getString(1));
                tableInfo.setColumnType(res.getString(2));
                tableInfo.setColumnComment(res.getString(3));
                tableInfoList.add(tableInfo);
            }
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
        }
        return tableInfoList;
    }

    /**
     * Query the table names of a Hive database.
     *
     * @param abstractSql a statement such as "show tables"
     * @return the table names
     */
    public List<String> selectTableNameFromHive(String abstractSql) {
        List<String> tableNameList = new ArrayList<>();
        logger.info("Running sql: " + abstractSql);
        try (Connection conn = jdbcDataSource.getConnection();
             Statement statement = conn.createStatement();
             ResultSet res = statement.executeQuery(abstractSql)) {
            while (res.next()) {
                tableNameList.add(res.getString(1));
            }
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
        }
        return tableNameList;
    }

    /**
     * Load a data file into a Hive table, into yesterday's (year, month, day) partition.
     *
     * @param filepath  path of the data file
     * @param tableName target table in the "fe" database
     */
    public void loadIntoHiveTable(String filepath, String tableName) {
        String dateFileFormat = DateUtil.getYesterdayFileFormat();
        String[] dateSplit = dateFileFormat.split("/");
        StringBuilder buildSql = new StringBuilder();
        buildSql.append("load data inpath ").append("'").append(filepath).append("'")
                .append(" into table fe.").append(tableName).append(" partition (year = ")
                .append(dateSplit[0]).append(", month = ").append(dateSplit[1])
                .append(", day = ").append(dateSplit[2]).append(")");
        logger.info("SQL that loads the data into the Hive table: {}", buildSql);
        try {
            hiveJdbcTemplate.execute(buildSql.toString());
        } catch (DataAccessException dae) {
            logger.error(dae.toString());
        }
    }

    /**
     * Update a Hive table's schema (e.g. add columns).
     *
     * @param abstractSql an "alter table ..." statement
     */
    public void updateHiveTable(String abstractSql) {
        try {
            hiveJdbcTemplate.execute(abstractSql);
        } catch (DataAccessException dae) {
            logger.error(dae.toString());
        }
    }
}
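The DAO depends on a HiveConfig singleton that owns the pooled DataSource and the JdbcTemplate. That class is not shown in the post, so the following is only a minimal sketch: it assumes the Tomcat JDBC pool already used in the imports above and the standard HiveServer2 driver, and the URL, credentials and pool sizes are placeholders you would normally read from a properties file.

import org.apache.tomcat.jdbc.pool.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

public class HiveConfig {

    private static volatile HiveConfig instance = null;

    private final DataSource dataSource;
    private final JdbcTemplate jdbcTemplate;

    private HiveConfig() {
        // Placeholder settings; real code would load them from configuration.
        dataSource = new DataSource();
        dataSource.setDriverClassName("org.apache.hive.jdbc.HiveDriver");
        dataSource.setUrl("jdbc:hive2://localhost:10000/fe");
        dataSource.setUsername("hive");
        dataSource.setPassword("");
        dataSource.setInitialSize(2);
        dataSource.setMaxActive(10);
        // Tomcat's pooled DataSource implements javax.sql.DataSource,
        // so it can back a Spring JdbcTemplate directly.
        jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public static HiveConfig getInstance() {
        if (instance == null) {
            synchronized (HiveConfig.class) {
                if (instance == null) {
                    instance = new HiveConfig();
                }
            }
        }
        return instance;
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    public JdbcTemplate getJdbcTemplate() {
        return jdbcTemplate;
    }
}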

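TableInfo (com.didichuxing.fe.offline.entity.TableInfo) is likewise a project class the post never shows. A hypothetical stand-in that matches the setters used above, plus getters for reading the results back, could be as simple as:

public class TableInfo {

    private String columnName;
    private String columnType;
    private String columnComment;

    public String getColumnName() {
        return columnName;
    }

    public void setColumnName(String columnName) {
        this.columnName = columnName;
    }

    public String getColumnType() {
        return columnType;
    }

    public void setColumnType(String columnType) {
        this.columnType = columnType;
    }

    public String getColumnComment() {
        return columnComment;
    }

    public void setColumnComment(String columnComment) {
        this.columnComment = columnComment;
    }
}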
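Putting it together, a short driver might look like the sketch below. "fe.event_log" is a made-up table name; in Hive, "describe <table>" returns column name, type and comment in result-set columns 1 through 3, which is what selectTableInfoFromHive reads.

public class HiveJdbcDemo {

    public static void main(String[] args) {
        HiveJdbcDao dao = HiveJdbcDao.getInstance();

        // List every table in the current database.
        for (String table : dao.selectTableNameFromHive("show tables")) {
            System.out.println(table);
        }

        // Read the column metadata of one table ("fe.event_log" is hypothetical).
        for (TableInfo info : dao.selectTableInfoFromHive("describe fe.event_log")) {
            System.out.println(info.getColumnName() + "\t"
                    + info.getColumnType() + "\t" + info.getColumnComment());
        }
    }
}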