Hive以Thrift服务的形式运行服务器,允许使用不同编程语言编写的客户端进行访问;客户端可以通过Thrift、JDBC、ODBC等连接器与Hive服务器通信,执行相关操作。
public class HiveUtils extends AbstrUtils {
private static TTransport transport = null;
private static TProtocol protocol = null;
//连接池
private static ConnectionPool connectionPool = null;
static{
try{
Properties properties = PropertiesUtils.obtainValues("jdbc/jdbc.properties");
connectionPool = new ConnectionPool(properties.getProperty("hive.driverClassName"),
properties.getProperty("hive.url"), "", "");
transport = new TSocket(properties.getProperty("hive.host.ip"),
Integer.parseInt(properties.getProperty("hive.host.port")));
protocol = new TBinaryProtocol(transport);
transport.open();
}catch (Exception e) {
logger.info(e.getMessage(), e);
}
}
//获取连接池的连接
public static synchronized Connection obtainConnection() {
Connection connection = null;
try {
connection = connectionPool.getConnection();
} catch (SQLException e) {
logger.info(e.getMessage(), e);
}
return connection;
}
public static synchronized HiveClient obtainHiveClient() {
return new HiveClient(protocol);
}
public static void executeJDBC(String sql, Object... params) {
Connection conn = null;
PreparedStatement pstmt = null;
try{
conn = obtainConnection();
pstmt = conn.prepareStatement(sql);
if(null != params){
for(int i = 0; i < params.length; i++){
pstmt.setObject(i+1, params[i]);
}
}
pstmt.executeQuery();
} catch (SQLException e) {
logger.info(e.getMessage(), e);
} finally {
release(conn, pstmt, null);
}
}
public static void executeClient(String sql) {
try {
obtainHiveClient().execute(sql);
} catch (Exception e) {
logger.info(e.getMessage(), e);
}
}
/**
* 释放连接
* @param conn
* @param st
* @param rs
*/
public static void release(Connection conn, Statement st, ResultSet rs) {
try{
if (null != rs) rs.close();
if (null != st) st.close();
if (null != conn) conn.close();
}catch (Exception e) {
logger.info(e.getMessage(), e);
}
}
/**
* 返回连接到连接池
* @param conn
*/
public static void returnConnection (Connection conn) {
try{
if (null != conn) connectionPool.returnConnection(conn);
}catch (Exception e) {
logger.info(e.getMessage(), e);
}
}
}
属性的相关信息如下:
hive.host.ip=192.168.10.10
hive.host.port=10000
hive.driverClassName=org.apache.hadoop.hive.jdbc.HiveDriver
hive.url=jdbc:hive://192.168.10.10:10000/default
hive.username=hive
hive.password=hive
测试用例:
/**
 * Integration-style tests for {@link HiveUtils}; they require a reachable
 * Hive server configured in {@code jdbc/jdbc.properties}.
 */
public class HiveUtilsTest {

    /**
     * Queries the {@code pokes} table over JDBC and prints the second column.
     * Fix: {@code obtainConnection()} may return {@code null} when the pool is
     * down (HiveUtils logs and swallows the SQLException), so fail fast with a
     * clear message instead of an NPE; release resources in a finally block so
     * they are not leaked when an exception is thrown mid-test.
     */
    @Test
    public void testHiveJdbcExecute() throws Exception {
        String sql = "select * from pokes";
        Connection conn = HiveUtils.obtainConnection();
        if (conn == null) {
            throw new IllegalStateException("Hive connection unavailable; check jdbc/jdbc.properties");
        }
        PreparedStatement pstmt = null;
        ResultSet resultSet = null;
        try {
            pstmt = conn.prepareStatement(sql);
            resultSet = pstmt.executeQuery();
            while (resultSet.next()) {
                System.out.println(resultSet.getObject(2));
            }
        } finally {
            // Guaranteed cleanup even if executeQuery/next throws.
            HiveUtils.release(conn, pstmt, resultSet);
        }
    }

    /** Creates a table via the JDBC path (a DDL statement, not a query). */
    @Test
    public void testHiveCreate() throws Exception {
        String sql = "create table t_plat_test(id int, title string, content string)";
        HiveUtils.executeJDBC(sql);
    }

    /** Exports the {@code pokes} table to HDFS through the Thrift client. */
    @Test
    public void testHiveClientExecute() {
        String sql = "insert overwrite directory '/user/hadoop/pokes' select * from pokes";
        HiveUtils.executeClient(sql);
    }
}