hive jdbc/metastore客户端方式连接开启kerberos的hive集群api

pom依赖

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>3.1.2</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-metastore</artifactId>
    <version>3.1.2</version>
    <exclusions>
        <exclusion>
            <groupId>org.mortbay.jetty</groupId>
            <artifactId>*</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>*</artifactId>
        </exclusion>
        <exclusion>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
        </exclusion>
    </exclusions>
</dependency>

通过jdbc方式查询hive元数据信息

package kerberos;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
import java.sql.*;

/**
 * Demonstrates querying Hive metadata over JDBC against a Kerberos-secured
 * HiveServer2. Logs in from a keytab via UserGroupInformation, then uses
 * DatabaseMetaData to list tables and columns.
 */
public class HiveJdbcKerberosApi {

    public static void main(String[] args) throws SQLException, IOException, ClassNotFoundException {

        // Point the JVM at the Kerberos client configuration before any GSS-API code runs.
        System.setProperty("java.security.krb5.conf", "D:/cy/keytab/krb5.conf");
        // Uncomment for verbose Kerberos / JAAS troubleshooting output.
//        System.setProperty("sun.security.krb5.debug", "true");
//        System.setProperty("HADOOP_JAAS_DEBUG","true");

        // NOTE(review): the ";?" separator looks suspicious — the HiveServer2 URL format is
        // ";sess_var_list?hive_conf_list"; confirm the keytab parameters are actually honored.
        String jdbcUrl="jdbc:hive2://henghe-33:10000/default;principal=henghe/henghe-33@HENGHE.COM;?hive.client.keytab.file=D:/cy/keytab/henghe.tenant.keytab;" +
                "hive.client.kerberos.principal=henghe@HENGHE.COM";

        Configuration configuration = new Configuration();
        configuration.set("hadoop.security.authentication", "kerberos");
        configuration.set("hive.metastore.uris", "thrift://henghe-33:9083");

        // Obtain a Kerberos ticket from the keytab so the JDBC driver can authenticate.
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab("henghe@HENGHE.COM", "D:/cy/keytab/henghe.tenant.keytab");

        Class.forName("org.apache.hive.jdbc.HiveDriver");

//        testHiveDatabase(jdbcUrl);
        testHiveTable(jdbcUrl, "test");
        testHiveColumn(jdbcUrl, "test", "default");
    }

    /**
     * Prints the names of all tables in the given database.
     *
     * @param jdbcUrl      HiveServer2 JDBC URL (including the Kerberos principal)
     * @param databaseName database whose tables are listed
     * @throws SQLException if the connection or metadata query fails
     */
    public static void testHiveTable(String jdbcUrl, String databaseName) throws SQLException {
        // try-with-resources: the original leaked the Connection and ResultSet.
        try (Connection connection = DriverManager.getConnection(jdbcUrl)) {
            DatabaseMetaData metaData = connection.getMetaData();
            // Hive treats catalog and schema interchangeably here, so pass the
            // database name for both, as the original code did.
            try (ResultSet rs = metaData.getTables(databaseName, databaseName, null, new String[]{"TABLE"})) {
                while (rs.next()) {
                    String tableName = rs.getString("TABLE_NAME");
                    System.out.println("table------" + tableName);
                }
            }
        }
    }

    /**
     * Prints all database names visible on the server via {@code SHOW DATABASES}.
     *
     * @param jdbcUrl HiveServer2 JDBC URL (including the Kerberos principal)
     * @throws SQLException if the connection or query fails
     */
    public static void testHiveDatabase(String jdbcUrl) throws SQLException {
        // try-with-resources: the original leaked Connection, Statement and ResultSet.
        try (Connection connection = DriverManager.getConnection(jdbcUrl);
             Statement stmt = connection.createStatement();
             ResultSet rs = stmt.executeQuery("show databases")) {
            while (rs.next()) {
                System.out.print("databases----" + rs.getString(1));
                System.out.println();
            }
        }
    }

    /**
     * Prints column metadata (name, remarks, size, type, ordinal position, ...)
     * for the given table.
     *
     * @param jdbcUrl      HiveServer2 JDBC URL (including the Kerberos principal)
     * @param tableName    table whose columns are listed
     * @param databaseName database containing the table
     * @throws SQLException if the connection or metadata query fails
     */
    public static void testHiveColumn(String jdbcUrl, String tableName, String databaseName) throws SQLException {
        // try-with-resources: the original leaked the Connection and ResultSet.
        try (Connection connection = DriverManager.getConnection(jdbcUrl)) {
            DatabaseMetaData metaData = connection.getMetaData();
            // Bug fix: the original hard-coded "default"/"test" and ignored the
            // method parameters; use them so the method works for any table.
            try (ResultSet rs = metaData.getColumns(databaseName, databaseName, tableName, null)) {
                while (rs.next()) {
                    String columnName = rs.getString("COLUMN_NAME");
                    String remark = rs.getString("REMARKS");
                    Integer digits = rs.getInt("DECIMAL_DIGITS");
                    System.out.println("column---" + columnName);
                    System.out.println("remarks---" + remark);
                    System.out.println("digit---" + digits);
                    System.out.println("MaxLength--" + rs.getInt("COLUMN_SIZE"));
                    System.out.println("TYPE_NAME--" + rs.getString("TYPE_NAME"));
                    System.out.println("auto---" + rs.getString("is_auto_increment"));
                    System.out.println("index--" + rs.getInt("ORDINAL_POSITION"));
                }
            }
        }
    }

}

通过metastoreClient获取hive元数据信息

package kerberos;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.thrift.TException;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.List;
import java.util.Map;

/**
 * Demonstrates reading Hive metadata directly through the Hive Metastore thrift
 * client (bypassing HiveServer2/JDBC) against a Kerberos-secured cluster.
 */
public class HiveKerberosApi {

    private static final String USER_NAME = "user.name";
    // Some Hive Metastore properties

    public static void main(String[] args){
        HiveKerberosApi test = new HiveKerberosApi();
        try {
            test.testHiveColumn();
        } catch (Exception e) {
            // Top-level boundary of a demo program: report and exit.
            e.printStackTrace();
        }
    }

    /**
     * Logs in from a keytab and prints the first database name returned by the
     * metastore.
     *
     * @throws TException if the metastore client cannot connect or the RPC fails
     * @throws UncheckedIOException if the Kerberos keytab login fails
     */
    public void testHiveColumn() throws TException {

        // Kerberos client configuration plus verbose debug output.
//        System.setProperty("java.security.krb5.conf", "D:\\kerberos\\krb5.conf");
        System.setProperty("java.security.krb5.conf", "D:/cy/keytab/krb5.conf");
        System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("HADOOP_JAAS_DEBUG","true");

        HiveConf conf = new HiveConf(this.getClass());
//        conf.set("hive.metastore.uris", "thrift://henghe-131:9083");
        conf.set("hive.metastore.uris", "thrift://henghe-33:9083");
        conf.set("hadoop.security.authentication", "kerberos");
        // Defaults to true; set explicitly for clarity.
        conf.set("hive.metastore.execute.setugi","true");
        conf.set("hive.security.authorization.enabled", "false");
        // SASL must be on for a Kerberized metastore, and the client must know
        // the metastore service principal.
        conf.set("hive.metastore.sasl.enabled","true");
        conf.set("hive.metastore.kerberos.principal", "henghe/henghe-33@HENGHE.COM");
        conf.set("hive.server2.authentication.kerberos.principal","henghe/henghe-33@HENGHE.COM");
        UserGroupInformation.setConfiguration(conf);
        try {
            UserGroupInformation.loginUserFromKeytab("henghe@HENGHE.COM", "D:/cy/keytab/henghe.tenant.keytab");
        } catch (IOException e1) {
            // Bug fix: the original swallowed this exception and carried on with
            // no Kerberos ticket, guaranteeing a confusing failure later.
            // Fail fast; unchecked wrapper keeps the method signature unchanged.
            throw new UncheckedIOException("Kerberos keytab login failed", e1);
        }

        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            List<String> databases = client.getAllDatabases();
            // Guard against an empty result before calling get(0).
            if (databases.isEmpty()) {
                System.out.println("no databases visible to this principal");
            } else {
                System.out.println(databases.get(0));
            }
        } finally {
            // Bug fix: the thrift connection was never closed.
            client.close();
        }
    }

}

评论 4
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值