Connecting to Hive 2.3.7 via JDBC

Preparation

Modify hive-site.xml

<?xml version="1.0" encoding="UTF-8" standalone="no"?><?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
        <property>
                <name>javax.jdo.option.ConnectionURL</name>
                <value>jdbc:mysql://192.168.134.154:3306/hive</value>
        </property>

        <property>
                <name>javax.jdo.option.ConnectionDriverName</name>
                <value>com.mysql.jdbc.Driver</value>
        </property>

        <property>
                <name>javax.jdo.option.ConnectionUserName</name>
                <value>root</value>
        </property>

        <property>
                <name>javax.jdo.option.ConnectionPassword</name>
                <value>root</value>
        </property>

        <property>
                <name>hive.metastore.schema.verification</name>
                <value>false</value>
        </property>
        <property>
                <name>hive.cli.print.current.db</name>
                <value>true</value>
        </property>

        <property>
                <name>hive.cli.print.header</name>
                <value>true</value>
        </property>

        <!-- HiveServer2 settings -->
        <property>
                <name>hive.server2.thrift.port</name>
                <value>10000</value>
        </property>

        <property>
                <name>hive.server2.thrift.bind.host</name>
                <value>192.168.134.154</value>
        </property>

</configuration>
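With hive.server2.thrift.bind.host and hive.server2.thrift.port set as above, HiveServer2 listens on 192.168.134.154:10000, so Beeline and the JDBC code later in this post use a URL of the following form (the trailing database name is optional and defaults to default):

jdbc:hive2://192.168.134.154:10000/default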

If you cannot connect on port 10000, modify core-site.xml under /hadoop/etc/hadoop so that Hadoop allows the user running HiveServer2 (here root) to impersonate other users:

<property>
	<name>hadoop.proxyuser.root.hosts</name>
	<value>*</value>
</property>
<property>
	<name>hadoop.proxyuser.root.groups</name>
	<value>*</value>
</property>
<!-- If HiveServer2 runs as a different user, add the same two entries for that user -->
<property>
	<name>hadoop.proxyuser.<user>.hosts</name>
	<value>*</value>
</property>
<property>
	<name>hadoop.proxyuser.<user>.groups</name>
	<value>*</value>
</property>
Restart Hadoop:
stop-dfs.sh
start-dfs.sh

Start the services

hive --service metastore &
hive --service hiveserver2 &
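
If the services should keep running after the terminal is closed, a common variant is to start them with nohup and redirect their output (the log paths under /tmp are assumptions; any writable location works):

nohup hive --service metastore > /tmp/metastore.log 2>&1 &
nohup hive --service hiveserver2 > /tmp/hiveserver2.log 2>&1 &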

Check that the services are listening

netstat -natp|grep ${port}
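
For example, with the ports used in this setup (9083 is the metastore's default thrift port; it is not set explicitly above, so the default is assumed):

netstat -natp|grep 9083       # Hive metastore
netstat -natp|grep 10000      # HiveServer2 (hive.server2.thrift.port)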

Tip: connecting with Beeline is a quick way to see which service failed to start and why.
Steps:

Open a new terminal and run beeline (if the Hive environment variables are not set, start it from Hive's ./bin directory), then connect:
!connect jdbc:hive2://${server}:${port}
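
A sample session, assuming the host and port configured in hive-site.xml above (logging in as root with an empty password is an assumption; use whatever your setup expects):

beeline
beeline> !connect jdbc:hive2://192.168.134.154:10000
Enter username for jdbc:hive2://192.168.134.154:10000: root
Enter password for jdbc:hive2://192.168.134.154:10000:
0: jdbc:hive2://192.168.134.154:10000> show databases;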

Startup error reference

Stopping the services

Use jps to find the RunJar processes (the metastore and HiveServer2 both show up as RunJar), or
ps -ef|grep hiveserver2

or netstat -nat|grep 10000

then kill -9 the corresponding PID.
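
Putting the checks and the kill together, a minimal sketch (12345 is a hypothetical PID; substitute the one printed on your machine):

jps | grep RunJar                 # Hive services show up as RunJar processes
ps -ef|grep hiveserver2           # or search by command line
netstat -natp|grep 10000          # or find what is listening on port 10000
kill -9 12345                     # hypothetical PID; replace with the real one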

Code

<!-- Hive JDBC dependency -->
<dependency>
	<groupId>org.apache.hive</groupId>
	<artifactId>hive-jdbc</artifactId>
	<version>${hive-version}</version>
	<exclusions>
		<exclusion>
			<groupId>org.eclipse.jetty.aggregate</groupId>
			<artifactId>*</artifactId>
		</exclusion>
	</exclusions>
</dependency>

<dependency>
	<groupId>org.apache.hadoop</groupId>
	<artifactId>hadoop-common</artifactId>
	<version>${hadoop-version}</version>
</dependency>
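
${hive-version} and ${hadoop-version} are resolved from the POM's <properties> block. A minimal sketch, assuming the Hive 2.3.7 release this post targets; the Hadoop version shown is an assumption, so match it to your cluster:

<properties>
	<hive-version>2.3.7</hive-version>
	<!-- assumption: replace with the Hadoop version your cluster runs -->
	<hadoop-version>2.7.7</hadoop-version>
</properties>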

package hive_test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class test {
    // The driver class is provided by the hive-jdbc jar
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // Hive server IP and HiveServer2 port, as configured in hive-site.xml above
    private static String url = "jdbc:hive2://192.168.134.154:10000/";
    private static Connection conn;

    public static Connection getConnection() {
        try {
            Class.forName(driverName);
            // Username and password may be left empty when simply connecting to Hive
            conn = DriverManager.getConnection(url, "", "");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return conn;
    }

    public static PreparedStatement prepare(Connection conn, String sql) {
        PreparedStatement ps = null;
        try {
            ps = conn.prepareStatement(sql);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return ps;
    }
}


package hive_test;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class test2 {
    private static Connection conn = test.getConnection();
    private static PreparedStatement ps;
    private static ResultSet rs;

    // Print every row of the given table, tab-separated
    public static void getAll(String tablename) {
        String sql = "select * from " + tablename;
        System.out.println(sql);
        try {
            ps = test.prepare(conn, sql);
            rs = ps.executeQuery();
            int columns = rs.getMetaData().getColumnCount();
            while (rs.next()) {
                for (int i = 1; i <= columns; i++) {
                    System.out.print(rs.getString(i));
                    System.out.print("\t\t");
                }
                System.out.println();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        String tablename = "students";
        test2.getAll(tablename);
    }
}
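
Neither class above ever closes its connection, statement, or result set. A minimal sketch of the same query using try-with-resources so the JDBC resources are released automatically (the class name test3 is hypothetical; students is the table used in main above):

package hive_test;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class test3 {
    public static void main(String[] args) throws SQLException {
        String sql = "select * from students";
        // try-with-resources closes the connection, statement, and result set in reverse order
        try (Connection conn = test.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql);
             ResultSet rs = ps.executeQuery()) {
            int columns = rs.getMetaData().getColumnCount();
            while (rs.next()) {
                for (int i = 1; i <= columns; i++) {
                    System.out.print(rs.getString(i) + "\t\t");
                }
                System.out.println();
            }
        }
    }
}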

Hive CLI tweaks

If you want the Hive CLI to show the current database name in the prompt and column headers in query results, add the following to hive-site.xml:

<property>
	<name>hive.cli.print.header</name>
	<value>true</value>
</property>
<property>
	<name>hive.resultset.use.unique.column.names</name>
	<value>false</value>
</property>
<property>
	<name>hive.cli.print.current.db</name>
	<value>true</value>
</property>
Effect
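
With these settings the CLI prompt shows the current database and query output starts with a plain header row (no table-name prefix on the columns). An illustrative sketch; the id and name columns of the students table are hypothetical:

hive (default)> select id, name from students limit 2;
OK
id	name
...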
