Hive学习笔记-API简单操作

Hive以Thrift服务器的形式运行,允许用不同编程语言编写的客户端进行访问:可以通过Thrift、JDBC、ODBC等连接器与Hive服务器通信,执行建表、查询等相关操作。

public class HiveUtils extends AbstrUtils {
	
	private static TTransport transport = null;
	
	private static TProtocol protocol = null;
	//连接池
	private static ConnectionPool connectionPool = null;
	
	static{
		try{
			Properties properties = PropertiesUtils.obtainValues("jdbc/jdbc.properties");
			connectionPool = new ConnectionPool(properties.getProperty("hive.driverClassName"), 
					properties.getProperty("hive.url"), "", "");
			transport = new TSocket(properties.getProperty("hive.host.ip"),
					Integer.parseInt(properties.getProperty("hive.host.port")));
			protocol = new TBinaryProtocol(transport);
			transport.open();
		}catch (Exception e) {
			logger.info(e.getMessage(), e);
		}
	}
	
	//获取连接池的连接
	public static synchronized Connection obtainConnection() {
		Connection connection = null;
		try {
			connection = connectionPool.getConnection();
		} catch (SQLException e) {
			logger.info(e.getMessage(), e);
		}
		return connection;
	}
	
	public static synchronized HiveClient obtainHiveClient() {
		return new HiveClient(protocol);
	}
	
	public static void executeJDBC(String sql, Object... params) {
		Connection conn = null;
		PreparedStatement pstmt = null;
		try{
			conn = obtainConnection();
			pstmt = conn.prepareStatement(sql);
			if(null != params){
				for(int i = 0; i < params.length; i++){
					pstmt.setObject(i+1, params[i]);
				}
			}
			pstmt.executeQuery();
		} catch (SQLException e) {
			logger.info(e.getMessage(), e);
		} finally {
			release(conn, pstmt, null);
		}
	}
	
	public static void executeClient(String sql) {
		try {
			obtainHiveClient().execute(sql);
		} catch (Exception e) {
			logger.info(e.getMessage(), e);
		} 
	}
	
	/**
	 * 释放连接
	 * @param conn
	 * @param st
	 * @param rs
	 */
	public static void release(Connection conn, Statement st, ResultSet rs) {
		try{
			if (null != rs) rs.close();
			if (null != st) st.close();
			if (null != conn) conn.close();
		}catch (Exception e) {
			logger.info(e.getMessage(), e);
		}
	}
	
	/**
	 * 返回连接到连接池
	 * @param conn
	 */
	public static void returnConnection (Connection conn) {
		try{
			if (null != conn) connectionPool.returnConnection(conn);
		}catch (Exception e) {
			logger.info(e.getMessage(), e);
		}
	}

}
其中 jdbc/jdbc.properties 属性文件的内容如下:

hive.host.ip=192.168.10.10
hive.host.port=10000
hive.driverClassName=org.apache.hadoop.hive.jdbc.HiveDriver
hive.url=jdbc:hive://192.168.10.10:10000/default
hive.username=hive
hive.password=hive
测试用例:
/**
 * Integration tests for {@link HiveUtils}; they require a reachable Hive
 * server configured in jdbc/jdbc.properties.
 */
public class HiveUtilsTest {

	/**
	 * Runs a SELECT over a pooled JDBC connection and prints column 2
	 * of every row.
	 */
	@Test
	public void testHiveJdbcExecute() throws Exception {
		String sql = "select * from pokes";
		Connection conn = HiveUtils.obtainConnection();
		PreparedStatement pstmt = null;
		ResultSet resultSet = null;
		try {
			pstmt = conn.prepareStatement(sql);
			resultSet = pstmt.executeQuery();
			while (resultSet.next()) {
				System.out.println(resultSet.getObject(2));
			}
		} finally {
			// Fix: release() was only reached on the success path, leaking
			// conn/pstmt/resultSet whenever the query threw.
			HiveUtils.release(conn, pstmt, resultSet);
		}
	}
	
	/**
	 * Exercises DDL through executeJDBC (a non-SELECT statement).
	 */
	@Test
	public void testHiveCreate() throws Exception {
		String sql = "create table t_plat_test(id int, title string, content string)";
		HiveUtils.executeJDBC(sql);
	}
	
	/**
	 * Exercises the Thrift client path with an INSERT OVERWRITE.
	 */
	@Test
	public void testHiveClientExecute() {
		String sql = "insert overwrite directory '/user/hadoop/pokes' select * from pokes";
		HiveUtils.executeClient(sql);
	}
	
}



  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值