Hive数据库连接-连接池实现
通过HiveJDBC获取Hive的连接Connection,下面我们简单介绍HiveJDBC数据库连接实现
HiveJDBC配置文件
连接池配置文件hive-jdbc.properties
# 初始化连接池数
db.jdbcPoolInitSize=5
# HIVE的Driver,不需要修改
db.driver=org.apache.hive.jdbc.HiveDriver
# hiveserver2 的ip地址
db.ip=hadoop01
# hiveserver2 的客户端端口,默认为10000
db.port=10000
# Hive连接是否开启Kerberos认证
hive.iskrb5=false
# Kerberos认证开启时使用的Hive认证主体
hive.principal=hive/hadoop01@HADOOP.COM
# HIVE数据库名称
db.name=dbname
# 用户名
db.username=username
# 密码
db.password=password
# 数据库HDFS目录
storage.directory=/user/hive/warehouse
读取HiveJDBC配置文件
public class HiveJdbcPer {
/**
* hive-jdbc.properties配置文件Properties
*/
public static Properties properties = PropertiesUtil.getProperties("hive-jdbc.properties");
/**
* 初始化连接池数
*/
static String jdbcPoolInitSize = properties.getProperty("db.jdbcPoolInitSize");
/**
* Hive JDBC Driver类
*/
static String dbDriver = properties.getProperty("db.driver");
/**
* hiveserver2 的ip地址
*/
static String dbIP = properties.getProperty("db.ip");
/**
* hiveserver2 的端口
*/
static String dbPort = properties.getProperty("db.port");
/**
* hiveserver2连接数据库名称
*/
public static String dbName = properties.getProperty("db.name");
/**
*
* hiveserver2连接用名名称
*/
final static String dbUsername = properties.getProperty("db.username");
/**
* hiveserver2连接用名密码
*/
final static String dbPassword = properties.getProperty("db.password");
/**
* hiveserver2连接是否开启Kerberos认证
*/
final static String iskrb5 = properties.getProperty("db.iskrb5");
/**
* hiveserver2开启Kerberos认证主体
*/
final static String krb5Principal = properties.getProperty("hive.principal")
/**
* Hive集群上的HDFS存储目录
*/
static String storageDirectory = properties.getProperty("storage.directory");
}
HiveJDBC连接池类
连接池通过实现javax.sql.DataSource
public class HiveJdbcPool implements DataSource {
private static final Logger LOGGER = LoggerFactory.getLogger(HiveJdbcPool.class);
/**
* @Field: Connections
* 使用Vector来存放数据库链接,
* Vector具备线程安全性
*/
private static final Vector connections = new Vector();
static {
//在静态代码块中加载db.properties数据库配置文件
try {
//数据库连接池的初始化连接数大小
int jdbcPoolInitSize = Integer.parseInt(HiveJdbcPer.jdbcPoolInitSize);
StringBuilder urlSB = new StringBuilder();
//加载数据库驱动
Class.forName(HiveJdbcPer.dbDriver);
urlSB.append( "jdbc:hive2://").append(HiveJdbcPer.dbIP).append(":").append(HiveJdbcPer.dbPort);
if(HiveJdbcPer.iskrb5.equalsIgnoreCase("true")){
urlSB.append("/;principal=").append(HiveJdbcPer.krb5Principal);
}
String dbURL = urlSB.toString();
for (int i = 0; i < jdbcPoolInitSize; i++) {
Connection conn = DriverManager.getConnection(dbURL, HiveJdbcPer.dbUsername, HiveJdbcPer.dbPassword);
LOGGER.debug("获取到了链接:{}", conn);
//将获取到的数据库连接加入到Connections集合中,Connections此时就是一个存放了数据库连接的连接池
connections.addElement(conn);
}
} catch (SQLException e) {
LOGGER.error(" 创建数据库连接失败!", e.getMessage());
try {
throw new SQLException(" 创建数据库连接失败!");
} catch (SQLException e1) {
e1.printStackTrace();
}
}catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
@Override
public Connection getConnection() throws SQLException {
synchronized (connections) {
//如果数据库连接池中的连接对象的个数大于0
if (connections.size() > 0){
final Connection conn = (Connection) connections.remove(0);
//注意需要添加OracleConnection.class,不然JdbcUtil.createOracleLob创建Clob会报错无法转换
return (Connection) Proxy.newProxyInstance(HiveJdbcPool.class.getClassLoader(), new Class[]{Connection.class}, (proxy, method, args) -> {
if (!method.getName().equals("close")) {
return method.invoke(conn, args);
} else {
//如果调用的是Connection对象的close方法,就把conn还给数据库连接池
connections.addElement(conn);
LOGGER.debug(conn + "被还给Connections数据库连接池了!!");
LOGGER.debug("Connections数据库连接池大小为" + connections.size());
return null;
}
});
} else throw new RuntimeException("对不起,数据库连接失败!");
}
}
@Override
public Connection getConnection(String username, String password) throws SQLException {
return null;
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return null;
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return false;
}
@Override
public PrintWriter getLogWriter() throws SQLException {
return null;
}
@Override
public void setLogWriter(PrintWriter out) throws SQLException {
}
@Override
public void setLoginTimeout(int seconds) throws SQLException {
}
@Override
public int getLoginTimeout() throws SQLException {
return 0;
}
@Override
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
return null;
}
}
HiveUtil工具类获取连接池中连接
/**
 * Convenience wrapper around {@link HiveJdbcPool}: borrow connections,
 * run queries, and release JDBC resources.
 */
public class HiveJdbcUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(HiveJdbcUtil.class);
    /**
     * Shared connection pool instance.
     */
    private static HiveJdbcPool pool = new HiveJdbcPool();

    /**
     * Borrows a connection from the pool.
     *
     * @return a pooled connection, or {@code null} if borrowing failed
     *         (null-return kept for backward compatibility with callers)
     */
    public static Connection getConnection() {
        Connection conn = null;
        try {
            conn = pool.getConnection();
        } catch (SQLException e) {
            // Log with the throwable so the stack trace is preserved instead
            // of printStackTrace().
            LOGGER.error("Hive JDBC 获取连接池中连接异常!", e);
        }
        return conn;
    }

    /**
     * Executes a query on the given connection.
     * <p>
     * NOTE(review): the Statement created here is intentionally not closed so
     * the caller can consume the ResultSet; callers must call
     * {@link #release(Connection, Statement, ResultSet)} when done.
     *
     * @param conn      an open connection
     * @param sqlstring the SQL query text
     * @return the ResultSet, or {@code null} if the query failed
     */
    public static ResultSet executeQuery(Connection conn, String sqlstring) {
        Statement st;
        ResultSet rs = null;
        try {
            st = conn.createStatement();
            rs = st.executeQuery(sqlstring);
        } catch (SQLException e) {
            // Log instead of silently swallowing; still return null on failure.
            LOGGER.error("Hive JDBC 查询执行异常: {}", sqlstring, e);
        }
        return rs;
    }

    /**
     * Releases JDBC resources in the proper order: ResultSet, then Statement,
     * then Connection. Each close is attempted independently so one failure
     * does not prevent the others.
     *
     * @param conn Connection (close() returns it to the pool via the proxy)
     * @param st   Statement
     * @param rs   ResultSet
     */
    public static void release(Connection conn, Statement st, ResultSet rs) {
        if (rs != null) {
            try {
                LOGGER.debug("关闭存储查询结果的ResultSet对象");
                rs.close();
            } catch (Exception e) {
                LOGGER.error("关闭ResultSet失败", e);
            }
        }
        if (st != null) {
            try {
                LOGGER.debug("关闭负责执行SQL命令的Statement对象");
                st.close();
            } catch (Exception e) {
                LOGGER.error("关闭Statement失败", e);
            }
        }
        if (conn != null) {
            try {
                LOGGER.debug("关闭Connection数据库连接对象");
                conn.close();
            } catch (Exception e) {
                LOGGER.error("关闭Connection失败", e);
            }
        }
    }
}