hive 的JDBC/ODBC接口
1. 概述
hive提供了三种用户接口:CLI、JDBC/ODBC 和 WebUI。JDBC/ODBC 是 Hive 的 Java API,与使用传统数据库 JDBC 的方式非常类似。
2. 导入依赖包
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.6.1</version>
</dependency>
3. 启动hadoop 与 hiveserver2
hadoop的sbin目录 启动hadoop:./sbin/start-all.sh
hive的bin目录 启动hive: ./bin/hiveserver2
4. 编码
/**
 * Demonstrates querying Hive over JDBC through HiveServer2.
 *
 * <p>A connection and statement are opened in {@code init()} (JUnit
 * {@code @Before}), each {@code @Test} runs one HiveQL statement, and
 * resources are released in {@code @After}. Requires a running
 * HiveServer2 reachable at {@code master:10000}.
 */
public class HiveJdbc {
    // Hive 2.x JDBC driver class (handles the jdbc:hive2:// URL scheme).
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // hive2 protocol, HiveServer2 host:port, default database "hive".
    private static String url = "jdbc:hive2://master:10000/hive";
    private static String user = "root";
    // NOTE(review): hard-coded credentials — load from config/env in real code.
    private static String password = "Zhm@818919";
    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;

    /** Loads the driver and opens the connection/statement before each test. */
    @Before
    public void init() throws Exception {
        Class.forName(driverName);
        conn = DriverManager.getConnection(url, user, password);
        stmt = conn.createStatement();
    }

    /** Prints every database visible to the connected user. */
    @Test
    public void showDatabases() throws Exception {
        String sql = "show databases";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    /** Creates the {@code hive_jdbc} database (fails if it already exists). */
    @Test
    public void createDatabase() throws Exception {
        String sql = "create database hive_jdbc";
        stmt.execute(sql);
    }

    /** Prints every table in the current database. */
    @Test
    public void showTables() throws Exception {
        String sql = "show tables";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    /** Loads a local file into the {@code student} table, replacing its data. */
    @Test
    public void loadData() throws Exception {
        String filePath = "/home/hadoop/data/student.txt";
        // "local inpath" reads from the HiveServer2 host's local filesystem;
        // "overwrite" replaces any existing table contents.
        String sql = "load data local inpath '" + filePath + "' overwrite into table student";
        stmt.execute(sql);
    }

    /** Prints every row of the {@code student} table. */
    @Test
    public void selectData() throws Exception {
        // Bug fix: table name was misspelled "stduent"; loadData() populates "student".
        String sql = "select * from student";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString("username") + "\t\t"
                    + rs.getString("sex") + "\t\t" + rs.getString("grade"));
        }
    }

    /**
     * Closes ResultSet, Statement and Connection after each test.
     * Null-safe and closed in reverse order of acquisition.
     */
    @After
    public void destory() throws Exception {
        if (rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}
5. 错误
- Caused by: org.apache.thrift.TApplicationException: Required field 'client_protocol' is unset!
极可能是你项目中的 hive-jdbc 版本与服务器端版本不一致造成的,替换成和服务器一致的版本即可
- 要先启动hadoop,然后再启动hiveserver2,才能运行