Big Data Platform Permission Design: Hive Database and Table Permissions

Permission Goals

Users' access to Hive databases and tables should be manageable through a web UI. The permission design model is shown in the figure below.

[Figure: 权限模型-思维导图.png — permission model mind map]

  • List a user's Hive database and table permissions (screenshot: image.png)
  • Add a Hive database permission (screenshot: image.png)
  • Add a Hive table permission (screenshot: image.png)
  • Modify a Hive database/table permission
  • Delete a Hive database/table permission

Implementation Approach

Before a SQL statement is executed in Hive, the statement is parsed to determine the operation type (select, drop, write, and so on), the databases and tables it touches, and the user issuing it. With that information the hook calls the permission check service, and execution only continues if the check passes. A rough sketch of the request/response exchanged with the check service follows.
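For orientation, the exchange with the check service looks roughly like the following. The field names are only inferred from the CheckPermissionModel and Result getters/setters used later in this post; the exact wire format depends on those classes (not shown here), so treat this as an illustrative assumption.

Request (HTTP POST to permissions.check.url):

{
  "command": "select * from ods.orders",
  "userName": "alice",
  "clusterType": "SPARK",
  "requestType": "HIVE_REQUEST",
  "databaseCurrentName": "default",
  "tempViews": []
}

Response (a status of "0" means the check call itself failed; obj carries the verdict):

{
  "status": "1",
  "obj": {
    "validateFlag": false,
    "userName": "alice",
    "msg": "[user alice has not SELECT permissions]",
    "command": "select * from ods.orders"
  }
}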
Modify the hive-site.xml configuration file:

<!-- permission check -->
    <property>
        <name>hive.server2.authentication</name>
        <value>CUSTOM</value>
    </property>
    <property>
        <name>hive.server2.custom.authentication.class</name>
        <value>org.apache.hive.permissions.check.BeelineCustomerAuth</value>
    </property>
    <property>
        <name>hive.semantic.analyzer.hook</name>
        <value>org.apache.hive.permissions.check.MyAuthorityHook</value>
    </property>
    <property>
        <name>permissions.cluster.type</name>
        <value>SPARK</value>
    </property>
    <property>
        <name>permissions.check.url</name>
        <value>http://192.168.94.1:8120/legend-permissions-spring/app/acl/permission/api/check</value>
    </property>
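The hook below reads the two custom properties above through a small constants class (imported from the org.apache.hadoop.hdfs.server.namenode.permissions.check package). That class is not included in the original post; a minimal sketch, assuming the constant values simply mirror the property keys configured in hive-site.xml:

package org.apache.hadoop.hdfs.server.namenode.permissions.check;

/**
 * Property keys used by the permission hooks.
 * Sketch only: the real class is not shown in this post; the values are
 * assumed to match the keys configured in hive-site.xml above.
 */
public final class PermissionsConstants {

    /** URL of the permission check service. */
    public static final String PERMISSIONS_CHECK_URL = "permissions.check.url";

    /** Cluster type reported to the check service, e.g. SPARK. */
    public static final String CLUSTER_TYPE = "permissions.cluster.type";

    private PermissionsConstants() {
    }
}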

The MyAuthorityHook class is shown below:

package org.apache.hive.permissions.check;

import org.apache.common.permissions.model.CheckPermissionModel;
import org.apache.common.permissions.model.ClusterType;
import org.apache.common.permissions.model.RequestType;
import org.apache.common.permissions.model.Result;
import org.apache.common.permissions.utils.HttpTools;
import org.apache.common.permissions.utils.JacksonTools;
import org.apache.hadoop.hdfs.server.namenode.permissions.check.PermissionsConstants;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

public class MyAuthorityHook extends AbstractSemanticAnalyzerHook {

    private static final Logger log = LoggerFactory.getLogger(MyAuthorityHook.class);

    private final static String SUPER_USER_NAME = "hadoop";

    /**
     * Response status value indicating failure.
     */
    public final static String ZORE = "0";

    @Override
    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
        try {
            Properties properties = context.getHive().getConf().getAllProperties();
            String checkUrl = properties.getProperty(PermissionsConstants.PERMISSIONS_CHECK_URL);
            String clusterType = properties.getProperty(PermissionsConstants.CLUSTER_TYPE);


            String command = context.getCommand();
            String sessionUserName = "";
            if (SessionState.get() != null && SessionState.get().getAuthenticator() != null) {
                sessionUserName = SessionState.get().getAuthenticator().getUserName();
            }

            if (SUPER_USER_NAME.equals(sessionUserName)) {
                throw new SemanticException("permissions error , not allow hadoop user op...");
            }

            CheckPermissionModel checkPermissionModel = new CheckPermissionModel();
            checkPermissionModel.setCommand(command);
            checkPermissionModel.setUserName(sessionUserName);
            checkPermissionModel.setClusterType(ClusterType.valueOf(clusterType));
            checkPermissionModel.setRequestType(RequestType.HIVE_REQUEST);

            Database databaseCurrent = context.getHive().getDatabaseCurrent();

            String databaseCurrentName = databaseCurrent.getName();
            checkPermissionModel.setDatabaseCurrentName(databaseCurrentName);

            Map<String, Map<String, Table>> tempTables = SessionState.get().getTempTables();
            Set<String> strings = tempTables.keySet();


            log.info("---------------");
            log.info("------sessionUserName---------" + sessionUserName);
            log.info("---command----" + command);
            log.info("------databaseCurrentName---------" + databaseCurrentName);

            List<String> tempViews = new ArrayList<String>();
            for (String key : strings) {
                Map<String, Table> stringTableMap = tempTables.get(key);
                Set<String> strings1 = stringTableMap.keySet();
                for (String str : strings1) {
                    Table table = stringTableMap.get(str);
                    String tableName = table.getTableName();
                    String dbName = table.getDbName();
                    log.info("------tableName---------" + tableName + " ---dbName--" + dbName);
                    tempViews.add(dbName.toLowerCase() + "." + tableName.toLowerCase());
                }
            }
            checkPermissionModel.setTempViews(tempViews);
            log.info("---------------");

            String post = HttpTools.sendPost(checkUrl, JacksonTools.obj2json(checkPermissionModel));
            Result result = JacksonTools.json2pojo(post, Result.class);
            String status = result.getStatus();
            if (status.equals(ZORE)) {
                throw new SemanticException("validate permissions has error...");
            }

            Object obj = result.getObj();
            CheckPermissionModel model = JacksonTools.json2pojo(JacksonTools.obj2json(obj), CheckPermissionModel.class);

            boolean validateFlag = model.isValidateFlag();
            if (!validateFlag) {
                throw new SemanticException("user: " + model.getUserName() + " has no permission." + " error message: " + model.getMsg() + " executed command: " + model.getCommand());
            }

            return ast;
        } catch (SemanticException e) {
            // rethrow so Hive aborts the statement with a clear permission error
            throw e;
        } catch (Exception e) {
            log.error("permission check failed", e);
            throw new SemanticException("permission check failed: " + e.getMessage(), e);
        }
    }
}
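CheckPermissionModel is the request/response DTO exchanged with the check service. It is not included in the original post; the sketch below lists only the fields implied by the getters and setters used in the hook above and in the check service further down, so the exact shape is an assumption.

package org.apache.common.permissions.model;

import java.util.List;

/**
 * Sketch of the DTO posted to the permission check service.
 * Fields are inferred from their usage in MyAuthorityHook and the check
 * service code below; the real class may contain more.
 */
public class CheckPermissionModel {
    private String command;              // the SQL text being analyzed
    private String userName;             // session user issuing the SQL
    private ClusterType clusterType;     // e.g. SPARK, from permissions.cluster.type
    private RequestType requestType;     // HIVE_REQUEST or SPARK_REQUEST
    private String databaseCurrentName;  // current database of the session
    private List<String> tempViews;      // temp tables/views as "db.table", lower case
    private boolean validateFlag;        // set by the service: true if the statement is allowed
    private String msg;                  // set by the service: reason for denial

    // getters and setters omitted for brevity
}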

The BeelineCustomerAuth class is shown below:

package org.apache.hive.permissions.check;

import org.apache.hive.service.auth.PasswdAuthenticationProvider;

import javax.security.sasl.AuthenticationException;

public class BeelineCustomerAuth implements PasswdAuthenticationProvider {

    private final static String SUPER_USER_NAME = "hadoop";

    @Override
    public void Authenticate(String userName, String password) throws AuthenticationException {

        if (userName == null || userName.isEmpty()) {
            throw new AuthenticationException("user name is empty, please connect with -n <user>");
        }

        if (!SUPER_USER_NAME.equals(userName)) {
            return;
        }

        throw new AuthenticationException("permissions error , not allow hadoop user op...");
    }
}
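With this authenticator in place, clients only need to pass a user name when connecting; the password is not actually verified, the class merely rejects the super user hadoop and trusts the supplied name. For example (host and port are placeholders):

 beeline -u "jdbc:hive2://hiveserver2-host:10000" -n alice -p anything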

To stay compatible with Spark SQL syntax, SQL statements are parsed with Spark's own SQL parsing module. The classes involved are ResolvedLogicPlan, MySqlParse, OperateTypeEnum, ResolvedOperate and Table; see the Spark SQL parser source code for reference. The required build dependency is sketched below.
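A minimal build dependency sketch, assuming a Spark 2.x / Scala 2.11 build; the generated SqlBaseLexer/SqlBaseParser classes and the ANTLR runtime come in via the spark-catalyst artifact, and the version should match the cluster's Spark version:

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-catalyst_2.11</artifactId>
        <version>2.3.0</version>
        <scope>provided</scope>
    </dependency>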

package cn.com.legend.permissions.sql

import java.util.ArrayList
import org.antlr.v4.runtime.tree.xpath.XPath
import org.apache.logging.log4j.{LogManager, Logger}
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
import scala.collection.JavaConversions.collectionAsScalaIterable

/**
  * Parses the operation types and the operated objects (db/table) out of a SQL statement.
  **/
class ResolvedLogicPlan {

  protected var logger: Logger = LogManager.getLogger(getClass)

  def parseSql(sql: String): ArrayList[ResolvedOperate] = {
    val mySqlParse = new MySqlParse();
    mySqlParse.parse(sql) { parser =>
      parser.statement() match {
        //USE db
        case statement: UseContext =>
          val dbName = statement.identifier().getText
          logger.info("this is a UseContext statement,database is " + dbName);
          otherOperate();
        case statement: ShowTableContext =>
          logger.info("this is a ShowTableContext statement");
          otherOperate();
        case statement: CacheTableContext =>
          logger.info("this is a CacheTableContext statement");
          otherOperate();
        case statement: UncacheTableContext =>
          logger.info("this is a UncacheTableContext statement");
          otherOperate();
        case statement: ClearCacheContext =>
          logger.info("this is a ClearCacheContext statement");
          otherOperate();
        case statement: ExplainContext =>
          logger.info("this is a ExplainContext statement");
          otherOperate();
        case statement: RefreshResourceContext =>
          logger.info("this is a RefreshResourceContext statement");
          otherOperate();
        case statement: RefreshTableContext =>
          logger.info("this is a RefreshTableContext statement");
          otherOperate();
        case statement: CreateTempViewUsingContext =>
          logger.info("this is a CreateTempViewUsingContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          writeOperate(dbName,tableName);
        case statement: SetConfigurationContext =>
          logger.info("this is a SetConfigurationContext statement");
          otherOperate();
        case statement: ResetConfigurationContext =>
          logger.info("this is a ResetConfigurationContext statement");
          otherOperate();
        case statement: ShowTblPropertiesContext =>
          logger.info("this is a ShowTblPropertiesContext statement");
          otherOperate();
        case statement: DescribeFunctionContext =>
          logger.info("this is a DescribeFunctionContext statement");
          otherOperate();
        case statement: DescribeTableContext =>
          logger.info("this is a DescribeTableContext statement");
          otherOperate();
        case statement: ShowFunctionsContext =>
          logger.info("this is a ShowFunctionsContext statement");
          otherOperate();
        case statement: ShowColumnsContext =>
          logger.info("this is a ShowColumnsContext statement");
          otherOperate();
        case statement: ShowCreateTableContext =>
          logger.info("this is a ShowCreateTableContext statement");
          otherOperate();
        case statement: ShowDatabasesContext =>
          logger.info("this is a ShowDatabasesContext statement");
          otherOperate();
        case statement: ShowPartitionsContext =>
          logger.info("this is a ShowPartitionsContext statement");
          otherOperate();
        case statement: ShowTablesContext =>
          logger.info("this is a ShowTablesContext statement");
          otherOperate();
        case statement: AnalyzeContext =>
          logger.info("this is a AnalyzeContext statement");
          otherOperate();
        case statement: DescribeDatabaseContext =>
          logger.info("this is a DescribeDatabaseContext statement");
          otherOperate();
        case statement: SetDatabasePropertiesContext =>
          logger.info("this is a SetDatabasePropertiesContext statement");
          adminOperate();
        case statement: DropDatabaseContext =>
          val dbName = statement.identifier().getText
          logger.info("this is a DropDatabaseContext statement,db is " + dbName);
          adminOperate();
        case statement: CreateDatabaseContext =>
          val dbName = statement.identifier().getText
          logger.info("this is a CreateDatabaseContext statement,db is " + dbName);
          adminOperate();
        case statement: CreateFunctionContext =>
          logger.info("this is a CreateFunctionContext statement");
          adminOperate();
        case statement: DropFunctionContext =>
          logger.info("this is a DropFunctionContext statement");
          adminOperate();
        case statement: CreateHiveTableContext =>
          logger.info("this is a CreateHiveTableContext statement");
          var dbName = "";
          if (statement.createTableHeader().tableIdentifier().db != null) {
            dbName = statement.createTableHeader().tableIdentifier().db.getText;
          }
          val tableName = statement.createTableHeader().tableIdentifier().table.getText;
          createOperate(dbName, tableName);
        case statement: CreateTableLikeContext =>
          logger.info("this is a CreateTableLikeContext statement");
          var dbName = "";
          if (statement.target.db != null) {
            dbName = statement.target.db.getText
          }
          val tableName = statement.target.table.getText;
          createOperate(dbName, tableName);
        case statement: CreateViewContext =>
          logger.info("this is a CreateViewContext statement");
          otherOperate();
        case statement: CreateTableContext =>
          logger.info("this is a CreateTableContext statement");
          var dbName = "";
          if (statement.createTableHeader().tableIdentifier().db != null) {
            dbName = statement.createTableHeader().tableIdentifier().db.getText;
          }
          val tableName = statement.createTableHeader().tableIdentifier().table.getText;
          createOperate(dbName, tableName);
        case statement: DropTableContext =>
          logger.info("this is a DropTableContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          dropOperate(dbName, tableName);
        case statement: DropTablePartitionsContext =>
          logger.info("this is a DropTablePartitionsContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          alterOperate(dbName, tableName);
        case statement: TruncateTableContext =>
          logger.info("this is a TruncateTableContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          alterOperate(dbName, tableName);
        case statement: AddTableColumnsContext =>
          logger.info("this is a AddTableColumnsContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          alterOperate(dbName, tableName);
        case statement: AddTablePartitionContext =>
          logger.info("this is a AddTablePartitionContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          alterOperate(dbName, tableName);
        case statement: AlterViewQueryContext =>
          logger.info("this is a AlterViewQueryContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          alterOperate(dbName, tableName);
        case statement: ChangeColumnContext =>
          logger.info("this is a ChangeColumnContext statement");
          var dbName = "";
          if (statement.tableIdentifier().db != null) {
            dbName = statement.tableIdentifier().db.getText;
          }
          val tableName = statement.tableIdentifier().table.getText;
          alterOperate(dbName, tableName);
        case statement: RenameTableContext =>
          logger.info("this is a RenameTableContext statement");
          var dbName = "";
          if (statement.from.db != null) {
            dbName = statement.from.db.getText;
          }
          val tableName = statement.from.table.getText;
          alterOperate(dbName, tableName);
        case statement: RenameTablePartitionContext =>
          logger.info("this is a RenameTablePartitionContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: UnsetTablePropertiesContext =>
          logger.info("this is a UnsetTablePropertiesContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: SetTableLocationContext =>
          logger.info("this is a SetTableLocationContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: SetTablePropertiesContext =>
          logger.info("this is a SetTableLocationContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: SetTableSerDeContext =>
          logger.info("this is a SetTableSerDeContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: RepairTableContext =>
          logger.info("this is a RepairTableContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: RecoverPartitionsContext =>
          logger.info("this is a RecoverPartitionsContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          alterOperate(dbName, tableName);
        case statement: LoadDataContext =>
          logger.info("this is a LoadDataContext statement");
          var dbName = "";
          if (statement.tableIdentifier.db != null) {
            dbName = statement.tableIdentifier.db.getText;
          }
          val tableName = statement.tableIdentifier.table.getText;
          writeOperate(dbName, tableName);
        case statement: StatementDefaultContext =>
          logger.info("this is a StatementDefaultContext statement")
          val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
          statement.children.foreach { tree =>
            //the xpath patterns below refer to grammar rules defined in SqlBaseParser.ruleNames
            val xpath = "//fromClause//tableIdentifier"
            XPath.findAll(tree, xpath, parser).foreach { x =>
              val text = x.getText;
              var dbName = "";
              var tableName = "";
              if (text.contains(".")) {
                val textArr = text.split("\\.");
                dbName = textArr.apply(0);
                tableName = textArr.apply(1);
              } else {
                tableName = text;
              }
              val resolvedOperate = new ResolvedOperate();
              resolvedOperate.setDbName(dbName);
              resolvedOperate.setTableName(tableName);
              resolvedOperate.setOperateType(OperateTypeEnum.SELECT);
              resolvedOperates.add(resolvedOperate);
            }

            val insertXpath = "//insertInto//tableIdentifier"
            XPath.findAll(tree, insertXpath, parser).foreach { x =>
              val text = x.getText;
              var dbName = "";
              var tableName = "";
              if (text.contains(".")) {
                val textArr = text.split("\\.");
                dbName = textArr.apply(0);
                tableName = textArr.apply(1);
              } else {
                tableName = text;
              }
              val resolvedOperate = new ResolvedOperate();
              resolvedOperate.setDbName(dbName);
              resolvedOperate.setTableName(tableName);
              resolvedOperate.setOperateType(OperateTypeEnum.WRITE);
              resolvedOperates.add(resolvedOperate);
            }

          }
          resolvedOperates;
        case _ =>
          println("other statement");
          null;
      }
    }
  }

  private def otherOperate(): ArrayList[ResolvedOperate] = {
    val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
    val resolvedOperate = new ResolvedOperate();
    resolvedOperate.setOperateType(OperateTypeEnum.OTHER);
    resolvedOperates.add(resolvedOperate);
    resolvedOperates;
  }

  private def adminOperate(): ArrayList[ResolvedOperate] = {
    val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
    val resolvedOperate = new ResolvedOperate();
    resolvedOperate.setOperateType(OperateTypeEnum.ADMIN);
    resolvedOperates.add(resolvedOperate);
    resolvedOperates;
  }

  private def createOperate(dbName: String, tableName: String): ArrayList[ResolvedOperate] = {
    val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
    val resolvedOperate = new ResolvedOperate();
    resolvedOperate.setDbName(dbName);
    resolvedOperate.setTableName(tableName);
    resolvedOperate.setOperateType(OperateTypeEnum.CREATE);
    resolvedOperates.add(resolvedOperate);
    resolvedOperates;
  }

  private def dropOperate(dbName: String, tableName: String): ArrayList[ResolvedOperate] = {
    val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
    val resolvedOperate = new ResolvedOperate();
    resolvedOperate.setDbName(dbName);
    resolvedOperate.setTableName(tableName);
    resolvedOperate.setOperateType(OperateTypeEnum.DROP);
    resolvedOperates.add(resolvedOperate);
    resolvedOperates;
  }

  private def alterOperate(dbName: String, tableName: String): ArrayList[ResolvedOperate] = {
    val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
    val resolvedOperate = new ResolvedOperate();
    resolvedOperate.setDbName(dbName);
    resolvedOperate.setTableName(tableName);
    resolvedOperate.setOperateType(OperateTypeEnum.ALTER);
    resolvedOperates.add(resolvedOperate);
    resolvedOperates;
  }

  private def writeOperate(dbName: String, tableName: String): ArrayList[ResolvedOperate] = {
    val resolvedOperates: ArrayList[ResolvedOperate] = new ArrayList[ResolvedOperate]();
    val resolvedOperate = new ResolvedOperate();
    resolvedOperate.setDbName(dbName);
    resolvedOperate.setTableName(tableName);
    resolvedOperate.setOperateType(OperateTypeEnum.WRITE);
    resolvedOperates.add(resolvedOperate);
    resolvedOperates;
  }

}


package cn.com.legend.permissions.sql

import org.antlr.v4.runtime.atn.PredictionMode
import org.antlr.v4.runtime.misc.ParseCancellationException
import org.antlr.v4.runtime.{ANTLRInputStream, CommonTokenStream, IntStream}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.parser._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.Origin


class MySqlParse {

  class ANTLRNoCaseStringStream(input: String) extends ANTLRInputStream(input) {
    override def LA(i: Int): Int = {
      val la = super.LA(i)
      if (la == 0 || la == IntStream.EOF) la
      else Character.toUpperCase(la)
    }
  }

  def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
    val astBuilder = new AstBuilder();
    astBuilder.visitSingleStatement(parser.singleStatement()) match {
      case plan: LogicalPlan => plan
      case _ =>
        val position = Origin(None, None)
        throw new ParseException(Option(sqlText), "Unsupported SQL statement", position, position)
    }
  }

  def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
    val lexer = new SqlBaseLexer(new ANTLRNoCaseStringStream(command))
    lexer.removeErrorListeners()
    lexer.addErrorListener(ParseErrorListener)
    val tokenStream = new CommonTokenStream(lexer)
    val parser = new SqlBaseParser(tokenStream)
    parser.addParseListener(PostProcessor)
    parser.removeErrorListeners()
    parser.addErrorListener(ParseErrorListener)
    try {
      try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter.setPredictionMode(PredictionMode.SLL)
        toResult(parser)
      }
      catch {
        case e: ParseCancellationException =>
          e.printStackTrace();
          // if we fail, parse with LL mode
          tokenStream.reset() // rewind input stream
          parser.reset()
          // Try Again.
          parser.getInterpreter.setPredictionMode(PredictionMode.LL)
          toResult(parser)
      }
    }
    catch {
      case e: ParseException if e.command.isDefined =>
        throw e
      case e: ParseException =>
        throw e.withCommand(command)
      case e: AnalysisException =>
        val position = Origin(e.line, e.startPosition)
        throw new ParseException(Option(command), e.message, position, position)
    }
  }

}

package cn.com.legend.permissions.sql

object OperateTypeEnum extends Enumeration {
  type OperateTypeEnum = Value
  val ADMIN, CREATE, DROP, ALTER, SELECT, WRITE, OTHER = Value

}

package cn.com.legend.permissions.sql

import java.util.ArrayList

import OperateTypeEnum.OperateTypeEnum


class ResolvedOperate {
  /**
    * Operation type
    **/
  private var operateType: OperateTypeEnum = OperateTypeEnum.OTHER;

  private var dbName: String = null;

  private var tableName: String = null;

  private var oprateTables: ArrayList[Table] = new ArrayList[Table]();

  def getOperateType(): OperateTypeEnum = {
    operateType
  }

  def setOperateType(newOperateType: OperateTypeEnum) = {
    operateType = newOperateType;
  }


  def getOprateTables(): ArrayList[Table] = {
    oprateTables
  }

  def setOprateTables(newOprateTables: ArrayList[Table]) = {
    oprateTables = newOprateTables;
  }

  def getDbName(): String = {
    dbName
  }

  def setDbName(newDbName: String) = {
    dbName = newDbName;
  }

  def getTableName(): String = {
    tableName
  }

  def setTableName(newTableName: String) = {
    tableName = newTableName;
  }

}



package cn.com.legend.permissions.sql

class Table(val dbName: String, val tableName: String) {

}

The classes above are used as shown below: pass in a SQL statement and you get back the resolved operation types and the databases/tables being operated on. A hedged example of what the result looks like for a concrete statement follows the snippet.

 ResolvedLogicPlan resolvedLogicPlan = new ResolvedLogicPlan();
 ArrayList<ResolvedOperate> resolvedOperates = resolvedLogicPlan.parseSql(command);
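For illustration (this example is mine, not from the original post), an INSERT ... SELECT falls into the StatementDefaultContext branch, so parsing should yield one WRITE operation for the target table and one SELECT operation per source table, based on the //insertInto//tableIdentifier and //fromClause//tableIdentifier XPath rules above:

 ResolvedLogicPlan resolvedLogicPlan = new ResolvedLogicPlan();
 ArrayList<ResolvedOperate> ops = resolvedLogicPlan.parseSql(
         "insert into dw.orders_agg select o.uid, count(1) from ods.orders o join ods.users u on o.uid = u.id group by o.uid");
 for (ResolvedOperate op : ops) {
     // expected entries (order may vary):
     //   SELECT ods.orders
     //   SELECT ods.users
     //   WRITE  dw.orders_agg
     System.out.println(op.getOperateType() + " " + op.getDbName() + "." + op.getTableName());
 }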

Core code of the permission check service is shown below (the HivePermission entity it relies on is sketched after the code):

private CheckPermissionModel checkHiveSparkDbOrTablePermissions(CheckPermissionModel checkPermissionModel) {
        try {
            log.info("---requet params----");
            log.info(JsonTools.toJsonStr(checkPermissionModel));
            log.info("---requet params----");
            String userName = checkPermissionModel.getUserName();

            checkPermissionModel.setUserName(userName);

            //check whether the executing user is the super administrator
            if (SUPER_USER.equals(userName)) {
                checkPermissionModel.setValidateFlag(true);
                return checkPermissionModel;
            }

            //parse the SQL and resolve the operation types
            String command = checkPermissionModel.getCommand();

            //Spark SQL's parser does not support syntax like: insert overwrite local directory '/home/wyp/wyp' select * from wyp, so strip everything before the select and only check the query part
            if (command.toLowerCase().contains("insert") && command.toLowerCase().contains("directory")) {
                int selectIndex = command.indexOf("select");
                command = command.substring(selectIndex - 1, command.length());
            }

            ResolvedLogicPlan resolvedLogicPlan = new ResolvedLogicPlan();
            ArrayList<ResolvedOperate> resolvedOperates = resolvedLogicPlan.parseSql(command);

            //validate permissions
            CheckPermissionModel rCheckPermissionModel = validatePermission(checkPermissionModel, resolvedOperates);
            return rCheckPermissionModel;
        } catch (Exception e) {
            log.error(e.getMessage());
        }

        checkPermissionModel.setValidateFlag(false);
        return checkPermissionModel;
    }

    /**
     * Validate permissions for all resolved operations.
     */
    private CheckPermissionModel validatePermission(CheckPermissionModel checkPermissionModel, ArrayList<ResolvedOperate> resolvedOperates) {
        String userName = checkPermissionModel.getUserName();
        for (ResolvedOperate resolvedOperate : resolvedOperates) {
            Enumeration.Value operateType = resolvedOperate.getOperateType();
            //check whether this is an admin operation
            if (OperateTypeEnum.ADMIN().equals(operateType)) {
                checkPermissionModel.setValidateFlag(false);
                checkPermissionModel.setMsg("[user " + userName + " has not admin permissions]");
                return checkPermissionModel;
            }

            //OTHER is the default permission level that every user has
            if (OperateTypeEnum.OTHER().equals(operateType)) {
                continue;
            }

            //skip the check if the command contains the keyword temporary
            String command = checkPermissionModel.getCommand();
            if (command.toLowerCase().contains("temporary")) {
                continue;
            }

            CheckPermissionModel validateCheckPermissionModel = validateOperatePermission(checkPermissionModel, resolvedOperate, operateType.toString());
            if (validateCheckPermissionModel.isValidateFlag()) {
                continue;
            }

            return validateCheckPermissionModel;
        }

        checkPermissionModel.setValidateFlag(true);
        return checkPermissionModel;
    }

    /**
     * Validate the permission for a single resolved operation.
     */
    private CheckPermissionModel validateOperatePermission(CheckPermissionModel checkPermissionModel, ResolvedOperate resolvedOperate, String operateType) {
        String userName = checkPermissionModel.getUserName();
        String dbName = resolvedOperate.getDbName();
        if (!StringTools.hasText(dbName)) {
            dbName = checkPermissionModel.getDatabaseCurrentName();
        }

        ClusterType clusterType = checkPermissionModel.getClusterType();

        List<HivePermission> hivePermissions = null;

        String key = RedisTools.generateRedisKet(clusterType, userName);
        if (enable) {
            hivePermissions = (List<HivePermission>) redisDao.get(key);
        }

        if (CollectionUtils.isEmpty(hivePermissions)) {
            hivePermissions = hivePermissionsDao.findByUserNameAndClusterType(userName, clusterType.getValue());
            if (CollectionUtils.isEmpty(hivePermissions)) {
                checkPermissionModel.setValidateFlag(false);
                String msg = "[user " + userName + " has not " + operateType.toString() + " permissions]";
                checkPermissionModel.setMsg(msg);
                return checkPermissionModel;
            }

            if (enable) {
                redisDao.put(key, hivePermissions);
            }

        }

        //first check permissions granted on the database that owns the table
        HivePermission dbPermission = hivePermissions.stream().filter(x -> Constant.DB_TYPE.equals(x.getPermissionType())).findFirst().orElse(null);
        if (dbPermission != null) {
            if (dbPermission.isHasAll()) {
                checkPermissionModel.setValidateFlag(true);
                return checkPermissionModel;
            }

            CheckPermissionModel switchCaseDbValidateCheck = switchCaseDbValidate(checkPermissionModel, operateType, dbPermission);
            if (switchCaseDbValidateCheck != null) {
                return switchCaseDbValidateCheck;
            }

        }

        //then check permissions granted on the table itself
        String tableName = resolvedOperate.getTableName();

        //temporary tables/views are always allowed
        List<String> tempViews = checkPermissionModel.getTempViews();
        if (!CollectionUtils.isEmpty(tempViews)) {
            RequestType requestType = checkPermissionModel.getRequestType();
            String dbTable = "";
            if (RequestType.HIVE_REQUEST.equals(requestType)) {
                dbTable = dbName + "." + tableName;
            }

            if (RequestType.SPARK_REQUEST.equals(requestType)) {
                dbTable = tableName;
            }

            if (tempViews.contains(dbTable.toLowerCase())) {
                checkPermissionModel.setValidateFlag(true);
                log.info(dbName + "." + tableName + " is tempView,user has all permission");
                return checkPermissionModel;
            }
        }

        final String filterDbName = dbName;
        HivePermission tbPermission = hivePermissions.stream().filter(x ->
                x.getPermissionType().equals(Constant.TABLE_TYPE) && tableName.equals(x.getTbName()) && filterDbName.equals(x.getDbName())
        ).findFirst().orElse(null);

        if (tbPermission == null) {
            checkPermissionModel.setValidateFlag(false);
            String msg = "[user " + userName + " has not " + operateType + " dbName: " + dbName + " tableName: " + tableName + " permissions,or " + dbName + "." + tableName + " is not exist]";
            checkPermissionModel.setMsg(msg);
            return checkPermissionModel;
        }

        CheckPermissionModel switchCaseTableValidateCheck = switchCaseTableValidate(checkPermissionModel, operateType, tbPermission);
        if (switchCaseTableValidateCheck != null) {
            boolean validateFlag = switchCaseTableValidateCheck.isValidateFlag();
            if (!validateFlag) {
                String msg = "[user " + userName + " has not " + operateType + " dbName: " + dbName + " tableName: " + tableName + " permissions]";
                switchCaseTableValidateCheck.setMsg(msg);
            }
            return switchCaseTableValidateCheck;
        }

        checkPermissionModel.setValidateFlag(false);
        String msg = "[user " + userName + " has not " + operateType.toString() + " permissions]";
        checkPermissionModel.setMsg(msg);
        return checkPermissionModel;
    }

    private CheckPermissionModel switchCaseTableValidate(CheckPermissionModel checkPermissionModel, String operateType, HivePermission tbPermission) {
        switch (operateType) {
            case "ALTER":
                if (tbPermission.isHasAlter()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "CREATE":
                if (tbPermission.isHasCreate()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "DROP":
                if (tbPermission.isHasDrop()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "SELECT":
                if (tbPermission.isHasSelect()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "WRITE":
                if (tbPermission.isHasWrite()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            default:
                log.error("switch: not case,operateType: " + operateType.toString());
        }

        return null;
    }


    private CheckPermissionModel switchCaseDbValidate(CheckPermissionModel checkPermissionModel, String operateType, HivePermission dbPermission) {
        switch (operateType) {
            case "ALTER":
                if (dbPermission.isHasAlter()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "CREATE":
                if (dbPermission.isHasCreate()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "DROP":
                if (dbPermission.isHasDrop()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "SELECT":
                if (dbPermission.isHasSelect()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            case "WRITE":
                if (dbPermission.isHasWrite()) {
                    checkPermissionModel.setValidateFlag(true);
                    return checkPermissionModel;
                }
                break;
            default:
                log.error("switch: not case,operateType: " + operateType.toString());
        }

        return null;
    }
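The HivePermission entity referenced above (loaded via hivePermissionsDao and cached in Redis) is not shown in the original post either. A minimal sketch containing only the fields implied by the calls above; the real entity presumably also carries an id and persistence annotations:

public class HivePermission {
    private String userName;        // grantee
    private String clusterType;     // e.g. SPARK
    private String permissionType;  // Constant.DB_TYPE or Constant.TABLE_TYPE
    private String dbName;
    private String tbName;          // empty for database-level grants
    private boolean hasAll;         // database-level: grants every operation in the db
    private boolean hasCreate;
    private boolean hasDrop;
    private boolean hasAlter;
    private boolean hasSelect;
    private boolean hasWrite;

    // getters and setters (isHasAll(), isHasSelect(), getTbName(), getDbName(), ...) omitted
}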

 
