package com.linewell.hivetest;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * Minimal HiveServer2 connectivity smoke test: connects over JDBC and prints
 * the result of {@code show databases}, one database name per line.
 *
 * <p>Requires a running Hadoop cluster plus the Hive metastore and
 * hiveserver2 services, and proxy-user permissions for {@code root}
 * in core-site.xml (hadoop.proxyuser.root.hosts/groups).
 */
public class HiveTest {
    /** HiveServer2 JDBC driver class name. */
    private static final String DRIVER = "org.apache.hive.jdbc.HiveDriver";
    /** Connection URL; host/port point at the HiveServer2 instance. */
    private static final String URL = "jdbc:hive2://192.168.72.129:10000/default?useUnicode=true&characterEncoding=UTF-8";
    private static final String NAME = "root";
    // NOTE(review): hard-coded credentials — move to configuration or environment in real code.
    private static final String PASSWORD = "xxxxxxxx";

    public static void main(String[] args) {
        try {
            // Explicit driver load is optional on JDBC 4+ but kept for older driver jars.
            Class.forName(DRIVER);
        } catch (ClassNotFoundException e) {
            System.err.println("Hive JDBC driver not on classpath: " + e);
            return;
        }
        String sql = "show databases";
        // try-with-resources guarantees ResultSet, Statement and Connection are
        // closed in reverse order even if the query fails (the original leaked all three).
        try (Connection conn = DriverManager.getConnection(URL, NAME, PASSWORD);
             Statement stat = conn.createStatement();
             ResultSet rs = stat.executeQuery(sql)) {
            while (rs.next()) {
                // Column 1 is the database name returned by "show databases".
                System.out.println(rs.getString(1));
            }
        } catch (SQLException e) {
            // Boundary of a demo program: report and exit; narrowed from catch (Exception).
            e.printStackTrace();
        }
    }
}
跑程序时出现问题:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/E:/%e8%bd%af%e4%bb%b6%e5%a4%87%e4%bb%bd/apache-hive-2.1.1-bin/lib/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/E:/%e8%bf%85%e9%9b%b7%e4%b8%8b%e8%bd%bd/hadoop-2.8.0/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://192.168.72.129:10000/default?useUnicode=true&characterEncoding=UTF-8: Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): Unauthorized connection for super-user: root from IP 192.168.72.129
at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:209)
at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107)
at java.sql.DriverManager.getConnection(DriverManager.java:664)
at java.sql.DriverManager.getConnection(DriverManager.java:247)
at com.linewell.hivetest.HiveTest.main(HiveTest.java:18)
Caused by: org.apache.hive.service.cli.HiveSQLException: Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): Unauthorized connection for super-user: root from IP 192.168.72.129
解决办法:在core-site.xml 下添加
<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>
然后重启hadoop服务。
运行java程序
成功!
注意点
1.导入包要完整。
2.要开启hadoop集群,因为hive是基于hadoop的。
3. 启动 metastore 和hiveserver 启动命令:
hive --service metastore &
hive --service hiveserver & 或者 hive --service hiveserver2 &
启动 hiveserver 或者 hiveserver2 均可;它们在启动命令、性能、驱动类名和创建连接的方式上都不相同,建议使用 hiveserver2。