安装 Hadoop 和 Hive。
启动 Hadoop 和 Hive：
导入相关 Maven 依赖：
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.2.1</version>
</dependency>
HiveJDBC 工具类：
package com.utils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
 * Small JDBC helper for opening connections and preparing statements
 * against a HiveServer2 instance.
 *
 * <p>NOTE(review): host, port, database, username and password are
 * hard-coded below — move them to configuration before real use.
 */
public class HiveJDBC {
    /** HiveServer2 JDBC driver class. */
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    /** Connection URL: jdbc:hive2://host:port/database. */
    private static String url = "jdbc:hive2://192.168.220.128:10000/hive";

    /**
     * Opens a new connection to HiveServer2.
     *
     * <p>Returns {@code null} if the driver cannot be loaded or the
     * connection attempt fails; the error is printed to stderr. The
     * connection is no longer cached in a static field: each call
     * returns its own fresh connection, so a failed attempt can never
     * hand back a stale connection from an earlier call, and the
     * method is safe to call from multiple threads.
     *
     * @return a new {@link Connection}, or {@code null} on failure
     */
    public static Connection getConnection() {
        Connection conn = null;
        try {
            // Explicit driver load kept for pre-JDBC-4 compatibility;
            // JDBC 4+ drivers auto-register via the service loader.
            Class.forName(driverName);
            conn = DriverManager.getConnection(url, "root", "root");
        } catch (ClassNotFoundException e) {
            // Driver jar missing from the classpath. Report and return
            // null instead of System.exit(1): a utility class must not
            // terminate the caller's JVM.
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return conn;
    }

    /**
     * Misspelled legacy name kept for backward compatibility.
     *
     * @deprecated use {@link #getConnection()} instead
     * @return a new {@link Connection}, or {@code null} on failure
     */
    @Deprecated
    public static Connection getConnnection() {
        return getConnection();
    }

    /**
     * Prepares {@code sql} on the given connection.
     *
     * @param conn an open connection (must not be {@code null})
     * @param sql  the SQL text to prepare
     * @return the prepared statement, or {@code null} if preparation fails
     */
    public static PreparedStatement prepare(Connection conn, String sql) {
        PreparedStatement ps = null;
        try {
            ps = conn.prepareStatement(sql);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return ps;
    }
}
测试（JUnit 4）：
package com.utils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Manual integration tests for {@link HiveJDBC}.
 *
 * <p>NOTE(review): these require a live HiveServer2 at the URL
 * hard-coded in {@code HiveJDBC}, plus a {@code student} table in the
 * {@code hive} database — they are not runnable in isolation.
 */
public class HiveJDBCTest {
    Connection conn = null;
    PreparedStatement ps = null;
    ResultSet rs = null;

    /** Opens a fresh connection before each test. */
    @Before
    public void start() {
        conn = HiveJDBC.getConnnection();
    }

    /**
     * Closes JDBC resources after each test (innermost first) so that
     * statements, result sets and the connection opened per-test do
     * not leak across runs.
     */
    @After
    public void stop() {
        try {
            if (rs != null) {
                rs.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        try {
            if (ps != null) {
                ps.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Smoke test: prints the connection (null means connect failed). */
    @Test
    public void testConnection() {
        System.out.println(conn);
    }

    /** Runs a full-table scan of {@code student} and prints each row. */
    @Test
    public void test() {
        String sql = "select * from student";
        try {
            ps = HiveJDBC.prepare(conn, sql);
            rs = ps.executeQuery();
            int columns = rs.getMetaData().getColumnCount();
            while (rs.next()) {
                // Print every column of the row, tab-separated.
                for (int i = 1; i <= columns; i++) {
                    System.out.print(rs.getString(i));
                    System.out.print("\t");
                }
                System.out.println();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Placeholder — no assertions yet. */
    @Test
    public void prepare() {
    }
}