Hive (18): Connecting to HiveServer2 with the Java JDBC Client

I. Purpose

Connect to Hive from Java code and run queries against it.

II. Environment and Dependencies

1. Environment

(1) CDH 5.7.0

(2) CentOS 6.4

2. Official documentation

https://cwiki.apache.org/confluence/display/Hive/HiveServer2+Clients#HiveServer2Clients-JDBC

3. pom dependencies (CDH 5.7.0)

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>BG</groupId>
    <artifactId>Hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>


    <repositories>
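        <!-- The *-cdh5.7.0 artifacts below are published in the Cloudera repository, not in Maven Central -->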
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>
    <properties>
        <hadoop.version>2.6.0-cdh5.7.0</hadoop.version>
        <hbase.version>1.2.0-cdh5.7.0</hbase.version>
        <hive.version>1.1.0-cdh5.7.0</hive.version>

        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <dependency>
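            <!-- hive-jdbc supplies the HiveServer2 JDBC driver class org.apache.hive.jdbc.HiveDriver used in the code below -->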
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${hive.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-metastore</artifactId>
            <version>${hive.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>${hive.version}</version>
        </dependency>


        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.10</version>
            <scope>test</scope>
        </dependency>


    </dependencies>

</project>
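
As a quick sanity check that the hive-jdbc dependency resolved, the following minimal sketch (the class name DriverCheck is arbitrary) simply tries to load the HiveServer2 driver class from the classpath:

package com.hadoop.hive.jdbc;

// Minimal classpath check: if hive-jdbc is on the classpath the driver class loads,
// otherwise Class.forName throws ClassNotFoundException.
public class DriverCheck {
    public static void main(String[] args) {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            System.out.println("hive-jdbc driver found on the classpath");
        } catch (ClassNotFoundException e) {
            System.out.println("hive-jdbc driver missing: " + e.getMessage());
        }
    }
}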

III. Start the Required Services

1. Start HDFS (for example with sbin/start-dfs.sh on a plain Hadoop install, or through Cloudera Manager on a managed CDH cluster)

2. Start the Hive metastore

bin/hive --service metastore &

3. Start HiveServer2 (it listens on Thrift port 10000 by default, which is the port used in the JDBC URL below)

bin/hive --service hiveserver2 &

4. Verify the connection from the shell with beeline (-n is the username, -p the password)

bin/beeline -u jdbc:hive2://hadoop01:10000 -n root -p 123456

IV. Code

1. Apache Hive version

(1) The four connection elements

driver: org.apache.hive.jdbc.HiveDriver

url: jdbc:hive2://hadoop:10000/default

username: hadoop

password: hadoop

(2) Code (Apache version)

package com.hadoop.hive.jdbc;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveJdbcClient {
    // JDBC driver class for HiveServer2, provided by hive-jdbc
    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    public static void main(String[] args) throws SQLException {
        // Element 1: driver
        try {
            Class.forName(DRIVER_NAME);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
        // Element 2: url
        String url = "jdbc:hive2://hadoop:10000/default";
        // Element 3: username -> the user the queries run as; with the default
        // authentication (NONE) HiveServer2 does not verify it
        String username = "hadoop";
        // Element 4: password -> also not verified under the default authentication
        String password = "hadoop";

        Connection con = DriverManager.getConnection(url, username, password);
        Statement stmt = con.createStatement();

        String sql = "select deptno, count(1) as count from emp group by deptno";
        System.out.println("Running: " + sql);
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getInt(1) + "\t" + res.getInt(2));
        }

        res.close();
        stmt.close();
        con.close();
    }
}
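
If the query needs a runtime parameter, the Hive JDBC driver also supports java.sql.PreparedStatement. The sketch below is a hypothetical variant of the example above (same host and credentials; it assumes the emp table has the usual ename, sal and deptno columns):

package com.hadoop.hive.jdbc;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

// Hypothetical parameterized variant of the example above; reuses the same
// url/username/password and assumes emp has ename, sal and deptno columns.
public class HiveJdbcPreparedQuery {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        String url = "jdbc:hive2://hadoop:10000/default";
        String sql = "select ename, sal from emp where deptno = ?";
        try (Connection con = DriverManager.getConnection(url, "hadoop", "hadoop");
             PreparedStatement ps = con.prepareStatement(sql)) {
            ps.setInt(1, 10); // bind deptno = 10
            try (ResultSet res = ps.executeQuery()) {
                while (res.next()) {
                    System.out.println(res.getString(1) + "\t" + res.getDouble(2));
                }
            }
        }
    }
}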

2. CDH 5.7.0 version

package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class hiveread {
    // JDBC driver class for HiveServer2, provided by hive-jdbc
    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    public static void main(String[] args) throws SQLException {
        // Element 1: driver
        try {
            Class.forName(DRIVER_NAME);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
        // Element 2: url (database "test" on host hadoop01)
        String url = "jdbc:hive2://hadoop01:10000/test";
        // Element 3: username -> the user the queries run as; not verified under the
        // default (NONE) authentication
        String username = "hadoop";
        // Element 4: password -> also not verified under the default authentication
        String password = "hadoop";

        Connection con = DriverManager.getConnection(url, username, password);
        Statement stmt = con.createStatement();

        String sql = "select * from student_mysql";
        System.out.println("Running: " + sql);
        ResultSet res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getInt(1) + "\t" + res.getString(2) + "\t" + res.getInt(3));
        }

        res.close();
        stmt.close();
        con.close();
    }
}
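
Beyond plain SELECT statements, the same connection also handles metadata-style queries. A minimal sketch (the class name HiveShowTables is hypothetical; host, database, and credentials reuse the CDH example above) that lists the tables of the test database:

package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical helper: lists the tables of the "test" database via "show tables",
// reusing the connection settings of the CDH example above.
public class HiveShowTables {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection con = DriverManager.getConnection("jdbc:hive2://hadoop01:10000/test", "hadoop", "hadoop");
             Statement stmt = con.createStatement();
             ResultSet res = stmt.executeQuery("show tables")) {
            while (res.next()) {
                System.out.println(res.getString(1)); // table name
            }
        }
    }
}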
