1. Authorize Hive databases, tables, and columns in Ranger
To control access to Hive, add a policy in Ranger that grants the desired database/table/column permissions to the relevant users (the policy is created in the Ranger admin UI).
2. By default, Ambari sets HiveServer2 Authentication to None
As a result, when connecting to Hive on the server (for example: hive -n userName -p pwd), any value for pwd is accepted. This is clearly a problem.
To fix this, we define our own HiveServer2 Authentication.
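When hive.server2.authentication is set to CUSTOM, HiveServer2 loads the class named by hive.server2.custom.authentication.class and asks it to validate each username/password pair. That class must implement org.apache.hive.service.auth.PasswdAuthenticationProvider (shipped in hive-service); for reference, its contract is essentially the following sketch:

import javax.security.sasl.AuthenticationException;

public interface PasswdAuthenticationProvider {
    /**
     * Validate the given username/password pair.
     * Throw AuthenticationException if the pair is not valid;
     * return normally if authentication succeeded.
     */
    void Authenticate(String user, String password) throws AuthenticationException;
}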
3. Create the custom HiveServer2 authentication project
The project is a standard Maven project consisting of a pom.xml, a custom.auth.jdbc.properties file, and the MySqlJdbcUtils and CustomHiveServer2Auth classes shown below.
3.1 pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.xxx</groupId>
    <artifactId>custom-hiveserver2-auth</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <mysql.jdbc.version>5.1.34</mysql.jdbc.version>
        <hadoop-common.version>3.1.0</hadoop-common.version>
        <hive-common.version>3.1.0</hive-common.version>
        <commons-logging.version>1.2</commons-logging.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop-common.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-common</artifactId>
            <version>${hive-common.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-service</artifactId>
            <version>${hive-common.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>commons-logging</groupId>
            <artifactId>commons-logging</artifactId>
            <version>${commons-logging.version}</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>${mysql.jdbc.version}</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>
</project>
3.2 custom.auth.jdbc.properties
mysql.jdbc.url=jdbc:mysql://xxx.xxx.xxx.xxx:3306/xxx?autoReconnect=true&useUnicode=true&characterEncoding=utf8
mysql.jdbc.user=root
mysql.jdbc.password=xxx
mysql.jdbc.driverClass=com.mysql.jdbc.Driver
3.3 MySqlJdbcUtils
package com.xxx.hive.custom.utils;

import java.io.FileInputStream;
import java.io.InputStream;
import java.sql.*;
import java.util.Properties;

/**
 * <p>
 * Purpose: common utility class for connecting to MySQL.
 * </p>
 *
 * @author tuzq
 * Copyright 2018 xxx.com, Inc. All rights reserved
 * @version v1.0
 */
public class MySqlJdbcUtils {

    /** database url **/
    private static String URL = null;
    /** database user **/
    private static String USER = null;
    /** password **/
    private static String PWD = null;
    /** JDBC driver class **/
    private static String DRIVER_CLASS = null;

    public MySqlJdbcUtils(String jdbcConfigFile) {
        try (InputStream in = new FileInputStream(jdbcConfigFile)) {
            Properties prop = new Properties();
            prop.load(in);
            URL = prop.getProperty("mysql.jdbc.url");
            USER = prop.getProperty("mysql.jdbc.user");
            PWD = prop.getProperty("mysql.jdbc.password");
            DRIVER_CLASS = prop.getProperty("mysql.jdbc.driverClass");
            // register the JDBC driver
            Class.forName(DRIVER_CLASS);
        } catch (Exception e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * Get a connection to the configured database.
     * @return the connection
     * @throws SQLException if the connection cannot be obtained
     */
    public Connection getConnection() throws SQLException {
        return DriverManager.getConnection(URL, USER, PWD);
    }

    /**
     * Release resources.
     * @param rs   the ResultSet
     * @param stmt the Statement
     * @param conn the Connection
     */
    public void release(ResultSet rs, Statement stmt, Connection conn) {
        // close each resource if it is not null
        if (null != rs) {
            try {
                rs.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        if (null != stmt) {
            try {
                stmt.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        if (null != conn) {
            try {
                conn.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
3.4 CustomHiveServer2Auth
package com.xxx.hive.custom.auth;

import com.xxx.hive.custom.utils.MySqlJdbcUtils;
import com.xxx.hive.custom.utils.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.auth.PasswdAuthenticationProvider;

import javax.security.sasl.AuthenticationException;
import java.sql.*;

public class CustomHiveServer2Auth implements PasswdAuthenticationProvider {

    private static final Log log = LogFactory.getLog(CustomHiveServer2Auth.class);

    @Override
    public void Authenticate(String username, String password)
            throws AuthenticationException {
        // read the path of the JDBC config file from the Hive configuration
        HiveConf hiveConf = new HiveConf();
        Configuration conf = new Configuration(hiveConf);
        String filePath = conf.get("hive.server2.custom.authentication.jdbc.config.path");
        log.info("hive.server2.custom.authentication.jdbc.config.path = " + filePath);
        if (StringUtils.isBlank(filePath)) {
            throw new AuthenticationException("jdbc config path is null");
        }
        Connection conn = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        boolean flag = false;
        MySqlJdbcUtils jdbcUtils = new MySqlJdbcUtils(filePath);
        try {
            conn = jdbcUtils.getConnection();
            String sql = "select " +
                    "    ranger_user_name, " +
                    "    ranger_password, " +
                    "    exec_password " +
                    "FROM " +
                    "    xxx " +
                    "WHERE " +
                    "    ranger_user_name = ? and exec_password = ?";
            pstmt = conn.prepareStatement(sql);
            pstmt.setString(1, username);
            pstmt.setString(2, password);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                String name = rs.getString("ranger_user_name");
                String pwd = rs.getString("exec_password");
                if (StringUtils.isNotBlank(name) && StringUtils.isNotBlank(pwd)
                        && name.equals(username) && pwd.equals(password)) {
                    flag = true;
                }
            }
        } catch (Exception e) {
            throw new AuthenticationException("Failed to authenticate the Hive username and password", e);
        } finally {
            jdbcUtils.release(rs, pstmt, conn);
        }
        if (!flag) {
            throw new AuthenticationException("Invalid Hive username or password");
        }
    }
}
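Note: the StringUtils class imported above (com.xxx.hive.custom.utils.StringUtils) is not listed in this post. Assuming it only provides the isBlank/isNotBlank checks used here, a minimal sketch could look like the following; alternatively, Apache Commons Lang's StringUtils offers the same methods.

package com.xxx.hive.custom.utils;

/**
 * Minimal string helpers used by CustomHiveServer2Auth.
 * This is only a sketch; the original class is not shown in this post.
 */
public class StringUtils {

    /** Returns true if the string is null, empty, or whitespace only. */
    public static boolean isBlank(String s) {
        return s == null || s.trim().isEmpty();
    }

    /** Returns true if the string has at least one non-whitespace character. */
    public static boolean isNotBlank(String s) {
        return !isBlank(s);
    }
}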
3.5 Package the project
Build the project and copy the resulting custom-hiveserver2-auth-1.0-SNAPSHOT.jar to /usr/hdp/current/hive-client/lib, then sync it to hadoop4, hadoop5, and hadoop6.
3.6 Upload custom.auth.jdbc.properties
Copy custom.auth.jdbc.properties to the /usr/hdp/current/hive-client/conf directory. (Note: the file must be present on every node.)
4. Configure Hive
In Ambari, set HiveServer2 Authentication to CUSTOM and add the custom properties for the authentication class and JDBC config path. This is equivalent to adding the following to hive-site.xml:
<property>
    <name>hive.server2.authentication</name>
    <value>CUSTOM</value>
</property>
<property>
    <name>hive.server2.custom.authentication.class</name>
    <value>com.xxx.hive.custom.auth.CustomHiveServer2Auth</value>
</property>
<property>
    <name>hive.server2.custom.authentication.jdbc.config.path</name>
    <value>/usr/hdp/current/hive-client/conf/custom.auth.jdbc.properties</value>
</property>
After applying the configuration above, be sure to restart HiveServer2; otherwise the custom HiveServer2 Authentication will not take effect.
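To verify the result, connect through the Hive JDBC driver with a username/password pair stored in the MySQL table; a wrong password should now be rejected instead of being silently accepted. A minimal sketch (host, port, and credentials are placeholders, and the hive-jdbc driver is assumed to be on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class HiveAuthCheck {
    public static void main(String[] args) throws SQLException {
        // Placeholder host/port/database; adjust to your environment.
        String url = "jdbc:hive2://hadoop4:10000/default";
        // With a valid pair from the MySQL table, the connection succeeds.
        try (Connection conn = DriverManager.getConnection(url, "someUser", "correctPwd")) {
            System.out.println("Authenticated: " + !conn.isClosed());
        }
        // With a wrong password, getConnection() now throws an exception
        // instead of succeeding as it did when authentication was None.
    }
}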