eclipse上运行hive查询

环境:hadoop:2.5.2,hive:1.2.1

启动hadoop和hive:

[root@centOS1 ~]# nohup hive --service hiveserver2 &


1、pom文件:

<dependency>
	    <groupId>org.apache.hadoop</groupId>
	    <artifactId>hadoop-common</artifactId>
	    <version>2.5.2</version>
	    <scope>provided</scope>
	</dependency> 
	<!-- https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc -->
	<dependency>
	    <groupId>org.apache.hive</groupId>
	    <artifactId>hive-jdbc</artifactId>
	    <version>1.2.1</version>
	</dependency>

2、java代码

package cn.edu.nuc.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Minimal HiveServer2 JDBC demo: loads the Hive driver, connects to a
 * HiveServer2 instance, then runs "show tables", a full-table select,
 * and a count(1) against a fixed demo table, printing each result row.
 */
public class HiveJDBCTest {
	// HiveServer1 driver ("org.apache.hadoop.hive.jdbc.HiveDriver") no longer
	// exists in hive-jdbc 1.2.1; HiveServer2 uses the class below.
	private static String driverName = "org.apache.hive.jdbc.HiveDriver";
	// Demo table queried by every statement in main().
	private static String tableName = "test";

	/**
	 * Entry point. Exits with status 1 if the Hive JDBC driver is not on the
	 * classpath; otherwise runs the three demo queries and prints results.
	 *
	 * @param strings unused command-line arguments
	 * @throws SQLException if the connection or any query fails
	 */
	public static void main(String... strings) throws SQLException {
		// Explicit load keeps the original failure mode (exit 1 with a stack
		// trace) when hive-jdbc is missing; JDBC 4 auto-discovery would
		// otherwise surface this later as "No suitable driver".
		try {
			Class.forName(driverName);
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
			System.exit(1);
		}

		// try-with-resources closes the statement and connection even when a
		// query throws — the original leaked all three JDBC resources.
		try (Connection con = DriverManager.getConnection(
					"jdbc:hive2://centos1:10000/default", "", "");
				Statement stmt = con.createStatement()) {

			// 1) List tables matching the demo table name.
			String sql = "show tables '" + tableName + "'";
			System.out.println("Running: " + sql);
			try (ResultSet res = stmt.executeQuery(sql)) {
				if (res.next()) {
					System.out.println(res.getString(1));
				}
			}

			// 2) Full scan: prints "key<TAB>value" for every row.
			sql = "select * from " + tableName;
			try (ResultSet res = stmt.executeQuery(sql)) {
				while (res.next()) {
					System.out.println(String.valueOf(res.getInt(1)) + "\t"
							+ res.getString(2));
				}
			}

			// 3) Row count — this triggers a MapReduce job on the cluster,
			// which is where the /tmp permission error (section 3) surfaces.
			sql = "select count(1) from " + tableName;
			System.out.println("Running: " + sql);
			try (ResultSet res = stmt.executeQuery(sql)) {
				while (res.next()) {
					System.out.println(res.getString(1));
				}
			}
		}
	}
}

问题:

1)java.lang.ClassNotFoundException: org.apache.hadoop.hive.jdbc.HiveDriver
解决:
private static String driverName = "org.apache.hive.jdbc.HiveDriver"
instead of
private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";


2)Exception in thread "main" java.sql.SQLException: No suitable driver found for jdbc:hive://centos1:10000/default

原因:HiveServer2 的 JDBC URL 前缀是 jdbc:hive2(jdbc:hive 是旧 HiveServer1 的写法)。改成:
Connection con =
  DriverManager.getConnection("jdbc:hive2://centos1:10000/default", "<user>", "");

3)FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
hadoop权限问题,由于运行hive程序可能要跑mapreduce程序,所以通过查看hive日志(默认在/tmp/{user}/hive.log)可以发现/tmp目录没有权限

2016-11-26 16:54:12,850 ERROR [HiveServer2-Background-Pool: Thread-78]: exec.Task (SessionState.java:printError(960)) - Job Submission failed with exception 'org.apache.hadoop.security.AccessControlException(Permission denied: user=anonymous, access=EXECUTE, inode="/tmp/hadoop-yarn":root:supergroup:drwx------
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:271)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:257)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:208)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:171)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5904)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:3691)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:803)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:779)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:585)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:928)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2013)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2009)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2007)
)'
解决: hdfs dfs -chmod -R 777 /tmp (hadoop dfs 已被弃用;777 权限仅适合测试环境,生产环境应为提交作业的用户单独授权)

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值