Adding your own permission checks by modifying Hive's SQL standard-based authorization

Hive's SQL standard-based authorization may not cover every requirement, so here we modify it to fit our own needs. The change consists of three classes: a custom authorizer factory, a custom authorization validator, and a patched copy of SQLAuthorizationUtils. A configuration sketch for registering the factory follows the factory code below.


First, add your own factory:
package com.bfd.dw.plugin.hive.security;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.plugin.*;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessControllerWrapper;

/**
 * Created by wenting on 11/3/16.
 */
public class BFDSQLStdHiveAuthorizerFactory implements HiveAuthorizerFactory {

    @Override
    public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
                                               HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
        SQLStdHiveAccessControllerWrapper privilegeManager =
                new SQLStdHiveAccessControllerWrapper(metastoreClientFactory, conf, authenticator, ctx);
        return new HiveAuthorizerImpl(
                privilegeManager,
                new BFDSQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator,
                        privilegeManager, ctx) // plug in our own validator instead of the default
        );
    }
}
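
To activate the factory, register it in hive-site.xml and restart HiveServer2; the jar containing these classes must be on the HiveServer2 classpath (e.g. under $HIVE_HOME/lib). A minimal sketch, assuming a typical SQL standard authorization setup; the authenticator and doAs settings are the usual companions, adjust them to your deployment:

<property>
  <name>hive.security.authorization.manager</name>
  <value>com.bfd.dw.plugin.hive.security.BFDSQLStdHiveAuthorizerFactory</value>
</property>
<property>
  <name>hive.security.authorization.enabled</name>
  <value>true</value>
</property>
<property>
  <name>hive.security.authenticator.manager</name>
  <value>org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator</value>
</property>
<property>
  <name>hive.server2.enable.doAs</name>
  <value>false</value>
</property>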


Next, implement BFDSQLStdHiveAuthorizationValidator:
package com.bfd.dw.plugin.hive.security;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.*;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.Operation2Privilege.IOType;

/**
 * Created by wenting on 11/3/16.
 */
public class BFDSQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator {

    private final HiveMetastoreClientFactory metastoreClientFactory;
    private final HiveConf conf;
    private final HiveAuthenticationProvider authenticator;
    private final SQLStdHiveAccessControllerWrapper privController;
    private final HiveAuthzSessionContext ctx;
    public static final Log LOG = LogFactory.getLog(BFDSQLStdHiveAuthorizationValidator.class); // log under this class, not the stock validator

    public BFDSQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory,
                                            HiveConf conf, HiveAuthenticationProvider authenticator,
                                            SQLStdHiveAccessControllerWrapper privilegeManager, HiveAuthzSessionContext ctx)
            throws HiveAuthzPluginException {

        this.metastoreClientFactory = metastoreClientFactory;
        this.conf = conf;
        this.authenticator = authenticator;
        this.privController = privilegeManager;
        this.ctx = SQLAuthorizationUtils.applyTestSettings(ctx, conf);
        assertHiveCliAuthDisabled(conf);
    }

    private void assertHiveCliAuthDisabled(HiveConf conf) throws HiveAuthzPluginException {
        if (ctx.getClientType() == CLIENT_TYPE.HIVECLI
                && conf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
            throw new HiveAuthzPluginException(
                    "SQL standards based authorization should not be enabled from hive cli. "
                            + "Instead the use of storage based authorization in hive metastore is recommended. Set "
                            + ConfVars.HIVE_AUTHORIZATION_ENABLED.varname + "=false to disable authz within cli");
        }
    }

    @Override
    public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
                                List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
            throws HiveAuthzPluginException, HiveAccessControlException {

        if (LOG.isDebugEnabled()) {
            String msg = "Checking privileges for operation " + hiveOpType + " by user "
                    + authenticator.getUserName() + " on " + " input objects " + inputHObjs
                    + " and output objects " + outputHObjs + ". Context Info: " + context;
            LOG.debug(msg);
        }

        String userName = authenticator.getUserName();
        IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();

        // check privileges on input and output objects
        List<String> deniedMessages = new ArrayList<String>();
        checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, IOType.INPUT, deniedMessages);
        checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, IOType.OUTPUT, deniedMessages);

        SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
                HivePrincipalType.USER), hiveOpType, deniedMessages);
    }

    private void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> hiveObjects,
                                 IMetaStoreClient metastoreClient, String userName, IOType ioType, List<String> deniedMessages)
            throws HiveAuthzPluginException, HiveAccessControlException {

        if (hiveObjects == null) {
            return;
        }

        // Compare required privileges and available privileges for each hive object
        for (HivePrivilegeObject hiveObj : hiveObjects) {

            RequiredPrivileges requiredPrivs = Operation2Privilege.getRequiredPrivs(hiveOpType, hiveObj,
                    ioType);

            if(requiredPrivs.getRequiredPrivilegeSet().isEmpty()){
                // no privileges required, so don't need to check this object privileges
                continue;
            }

            // find available privileges
            RequiredPrivileges availPrivs = new RequiredPrivileges(); //start with an empty priv set;
            switch (hiveObj.getType()) {
                case LOCAL_URI: // modified: URI handling (see getPrivilegesFromFS below)
                case DFS_URI:
                    availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new Path(hiveObj.getObjectName()),
                            conf, userName);
                    if (privController.isUserAdmin()) {
                        availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
                    }
                    break;
                case PARTITION:
                    // sql std authorization is managing privileges at the table/view levels
                    // only
                    // ignore partitions
                    continue;
                case COMMAND_PARAMS:
                    continue;
                case FUNCTION: // modified: function handling
                    // operations that have objects of type COMMAND_PARAMS, FUNCTION are authorized
                    // solely on the type
                    // Modified: grant ADMIN_PRIV unconditionally so that any user (not just an
                    // admin) passes function authorization; the original check is kept below.
                    availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
                    /*if (privController.isUserAdmin()) {
                        availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
                    }*/
                    break;
                default:
                    availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore(metastoreClient, userName,
                            hiveObj, privController.getCurrentRoleNames(), privController.isUserAdmin());
            }

            // Verify that there are no missing privileges
            Collection<SQLPrivTypeGrant> missingPriv = requiredPrivs.findMissingPrivs(availPrivs);
            SQLAuthorizationUtils.addMissingPrivMsg(missingPriv, hiveObj, deniedMessages);

        }
    }

    public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
                                                          HiveAuthzContext context) {
        if (LOG.isDebugEnabled()) {
            String msg = "Obtained following objects in filterListCmdObjects " + listObjs
                    + " for user " + this.authenticator.getUserName() + ". Context Info: " + context;
            LOG.debug(msg);
        }
        return listObjs;
    }
}

Finally, modify SQLAuthorizationUtils.java (a copy of the stock class, moved into our own package):

package com.bfd.dw.plugin.hive.security;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.RequiredPrivileges;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLPrivTypeGrant;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLPrivilegeType;
import org.apache.thrift.TException;

public class SQLAuthorizationUtils {

    private static final String[] SUPPORTED_PRIVS = { "INSERT", "UPDATE", "DELETE", "SELECT" };
    private static final Set<String> SUPPORTED_PRIVS_SET = new HashSet<String>(
            Arrays.asList(SUPPORTED_PRIVS));
    public static final Log LOG = LogFactory.getLog(SQLAuthorizationUtils.class);

    /**
     * Create thrift privileges bag
     *
     * @param hivePrincipals
     * @param hivePrivileges
     * @param hivePrivObject
     * @param grantorPrincipal
     * @param grantOption
     * @return
     * @throws HiveAuthzPluginException
     */
    static PrivilegeBag getThriftPrivilegesBag(List<HivePrincipal> hivePrincipals,
                                               List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
                                               HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException {
        HiveObjectRef privObj = getThriftHiveObjectRef(hivePrivObject);
        PrivilegeBag privBag = new PrivilegeBag();
        for (HivePrivilege privilege : hivePrivileges) {
            if (privilege.getColumns() != null && privilege.getColumns().size() > 0) {
                throw new HiveAuthzPluginException("Privileges on columns not supported currently"
                        + " in sql standard authorization mode");
            }
            if (!SUPPORTED_PRIVS_SET.contains(privilege.getName().toUpperCase(Locale.US))) {
                throw new HiveAuthzPluginException("Privilege: " + privilege.getName()
                        + " is not supported in sql standard authorization mode");
            }
            PrivilegeGrantInfo grantInfo = getThriftPrivilegeGrantInfo(privilege, grantorPrincipal,
                    grantOption, 0 /*real grant time added by metastore*/);
            for (HivePrincipal principal : hivePrincipals) {
                HiveObjectPrivilege objPriv = new HiveObjectPrivilege(privObj, principal.getName(),
                        AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
                privBag.addToPrivileges(objPriv);
            }
        }
        return privBag;
    }

    static PrivilegeGrantInfo getThriftPrivilegeGrantInfo(HivePrivilege privilege,
                                                          HivePrincipal grantorPrincipal, boolean grantOption, int grantTime)
            throws HiveAuthzPluginException {
        try {
            return AuthorizationUtils.getThriftPrivilegeGrantInfo(privilege, grantorPrincipal,
                    grantOption, grantTime);
        } catch (HiveException e) {
            throw new HiveAuthzPluginException(e);
        }
    }

    /**
     * Create a thrift privilege object from the plugin interface privilege object
     *
     * @param privObj
     * @return
     * @throws HiveAuthzPluginException
     */
    static HiveObjectRef getThriftHiveObjectRef(HivePrivilegeObject privObj)
            throws HiveAuthzPluginException {
        try {
            return AuthorizationUtils.getThriftHiveObjectRef(privObj);
        } catch (HiveException e) {
            throw new HiveAuthzPluginException(e);
        }
    }

    static HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType)
            throws HiveAuthzPluginException {
        switch (objectType) {
            case DATABASE:
                return HivePrivilegeObjectType.DATABASE;
            case TABLE:
                return HivePrivilegeObjectType.TABLE_OR_VIEW;
            case COLUMN:
            case GLOBAL:
            case PARTITION:
                throw new HiveAuthzPluginException("Unsupported object type " + objectType);
            default:
                throw new AssertionError("Unexpected object type " + objectType);
        }
    }

    /**
     * Check if the privileges are acceptable for SQL Standard authorization implementation
     * @param hivePrivileges
     * @throws HiveAuthzPluginException
     */
    public static void validatePrivileges(List<HivePrivilege> hivePrivileges) throws HiveAuthzPluginException {
        for (HivePrivilege hivePrivilege : hivePrivileges) {
            if (hivePrivilege.getColumns() != null && hivePrivilege.getColumns().size() != 0) {
                throw new HiveAuthzPluginException(
                        "Privilege with columns are not currently supported with sql standard authorization:"
                                + hivePrivilege);
            }
            //try converting to the enum to verify that this is a valid privilege type
            SQLPrivilegeType.getRequirePrivilege(hivePrivilege.getName());

        }
    }

    /**
     * Get the privileges this user(userName argument) has on the object
     * (hivePrivObject argument) If isAdmin is true, adds an admin privilege as
     * well.
     *
     * @param metastoreClient
     * @param userName
     * @param hivePrivObject
     * @param curRoles
     *          current active roles for user
     * @param isAdmin
     *          if user can run as admin user
     * @return
     * @throws HiveAuthzPluginException
     */
    static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreClient,
                                                         String userName, HivePrivilegeObject hivePrivObject, List<String> curRoles, boolean isAdmin)
            throws HiveAuthzPluginException {

        // get privileges for this user and its role on this object
        PrincipalPrivilegeSet thrifPrivs = null;
        try {
            HiveObjectRef objectRef = AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
            if (objectRef.getObjectType() == null) {
                objectRef.setObjectType(HiveObjectType.GLOBAL);
            }
            thrifPrivs = metastoreClient.get_privilege_set(
                    objectRef, userName, null);
        } catch (MetaException e) {
            throwGetPrivErr(e, hivePrivObject, userName);
        } catch (TException e) {
            throwGetPrivErr(e, hivePrivObject, userName);
        } catch (HiveException e) {
            throwGetPrivErr(e, hivePrivObject, userName);
        }

        filterPrivsByCurrentRoles(thrifPrivs, curRoles);

        // convert to RequiredPrivileges
        RequiredPrivileges privs = getRequiredPrivsFromThrift(thrifPrivs);

        // add owner privilege if user is owner of the object
        if (isOwner(metastoreClient, userName, curRoles, hivePrivObject)) {
            privs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
        }
        if (isAdmin) {
            privs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
        }

        return privs;
    }

    /**
     * Remove any role privileges that don't belong to the roles in curRoles
     * @param thriftPrivs
     * @param curRoles
     * @return
     */
    private static void filterPrivsByCurrentRoles(PrincipalPrivilegeSet thriftPrivs,
                                                  List<String> curRoles) {
        // check if there are privileges to be filtered
        if(thriftPrivs == null || thriftPrivs.getRolePrivileges() == null
                || thriftPrivs.getRolePrivilegesSize() == 0
                ){
            // no privileges to filter
            return;
        }

        // add the privs for roles in curRoles to new role-to-priv map
        Map<String, List<PrivilegeGrantInfo>> filteredRolePrivs = new HashMap<String, List<PrivilegeGrantInfo>>();
        for(String role : curRoles){
            List<PrivilegeGrantInfo> privs = thriftPrivs.getRolePrivileges().get(role);
            if(privs != null){
                filteredRolePrivs.put(role, privs);
            }
        }
        thriftPrivs.setRolePrivileges(filteredRolePrivs);
    }

    /**
     * Check if user is owner of the given object
     *
     * @param metastoreClient
     * @param userName
     *          current user
     * @param curRoles
     *          current roles for userName
     * @param hivePrivObject
     *          given object
     * @return true if user is owner
     * @throws HiveAuthzPluginException
     */
    private static boolean isOwner(IMetaStoreClient metastoreClient, String userName,
                                   List<String> curRoles, HivePrivilegeObject hivePrivObject) throws HiveAuthzPluginException {
        // for now, check only table & db
        switch (hivePrivObject.getType()) {
            case TABLE_OR_VIEW: {
                Table thriftTableObj = null;
                try {
                    thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(),
                            hivePrivObject.getObjectName());
                } catch (Exception e) {
                    throwGetObjErr(e, hivePrivObject);
                }
                return userName.equals(thriftTableObj.getOwner());
            }
            case DATABASE: {
                if (MetaStoreUtils.DEFAULT_DATABASE_NAME.equalsIgnoreCase(hivePrivObject.getDbname())) {
                    return true;
                }
                Database db = null;
                try {
                    db = metastoreClient.getDatabase(hivePrivObject.getDbname());
                } catch (Exception e) {
                    throwGetObjErr(e, hivePrivObject);
                }
                // a db owner can be a user or a role
                if(db.getOwnerType() == PrincipalType.USER){
                    return userName.equals(db.getOwnerName());
                } else if(db.getOwnerType() == PrincipalType.ROLE){
                    // check if any of the roles of this user is an owner
                    return curRoles.contains(db.getOwnerName());
                } else {
                    // looks like owner is an unsupported type
                    LOG.warn("Owner of database " + db.getName() + " is of unsupported type "
                            + db.getOwnerType());
                    return false;
                }
            }
            case DFS_URI:
            case LOCAL_URI:
            case PARTITION:
            default:
                return false;
        }
    }

    private static void throwGetObjErr(Exception e, HivePrivilegeObject hivePrivObject)
            throws HiveAuthzPluginException {
        String msg = "Error getting object from metastore for " + hivePrivObject;
        throw new HiveAuthzPluginException(msg, e);
    }

    private static void throwGetPrivErr(Exception e, HivePrivilegeObject hivePrivObject,
                                        String userName) throws HiveAuthzPluginException {
        String msg = "Error getting privileges on " + hivePrivObject + " for " + userName + ": "
                + e.getMessage();
        throw new HiveAuthzPluginException(msg, e);
    }

    private static RequiredPrivileges getRequiredPrivsFromThrift(PrincipalPrivilegeSet thrifPrivs)
            throws HiveAuthzPluginException {

        RequiredPrivileges reqPrivs = new RequiredPrivileges();
        // add user privileges
        Map<String, List<PrivilegeGrantInfo>> userPrivs = thrifPrivs.getUserPrivileges();
        if (userPrivs != null && userPrivs.size() != 1) {
            throw new HiveAuthzPluginException("Invalid number of user privilege objects: "
                    + userPrivs.size());
        }
        addRequiredPrivs(reqPrivs, userPrivs);

        // add role privileges
        Map<String, List<PrivilegeGrantInfo>> rolePrivs = thrifPrivs.getRolePrivileges();
        addRequiredPrivs(reqPrivs, rolePrivs);
        return reqPrivs;
    }

    /**
     * Add privileges to RequiredPrivileges object reqPrivs from thrift availPrivs
     * object
     * @param reqPrivs
     * @param availPrivs
     * @throws HiveAuthzPluginException
     */
    private static void addRequiredPrivs(RequiredPrivileges reqPrivs,
                                         Map<String, List<PrivilegeGrantInfo>> availPrivs) throws HiveAuthzPluginException {
        if(availPrivs == null){
            return;
        }
        for (Map.Entry<String, List<PrivilegeGrantInfo>> userPriv : availPrivs.entrySet()) {
            List<PrivilegeGrantInfo> userPrivGInfos = userPriv.getValue();
            for (PrivilegeGrantInfo userPrivGInfo : userPrivGInfos) {
                reqPrivs.addPrivilege(userPrivGInfo.getPrivilege(), userPrivGInfo.isGrantOption());
            }
        }
    }

    public static void addMissingPrivMsg(Collection<SQLPrivTypeGrant> missingPrivs,
                                         HivePrivilegeObject hivePrivObject, List<String> deniedMessages) {
        if (missingPrivs.size() != 0) {
            // there are some required privileges missing, create error message
            // sort the privileges so that error message is deterministic (for tests)
            List<SQLPrivTypeGrant> sortedmissingPrivs = new ArrayList<SQLPrivTypeGrant>(missingPrivs);
            Collections.sort(sortedmissingPrivs);
            String errMsg = sortedmissingPrivs + " on " + hivePrivObject;
            deniedMessages.add(errMsg);
        }
    }

    /**
     * Map permissions for this uri to SQL Standard privileges
     * @param filePath
     * @param conf
     * @param userName
     * @return
     * @throws HiveAuthzPluginException
     */
    public static RequiredPrivileges getPrivilegesFromFS(Path filePath, HiveConf conf,
                                                         String userName) throws HiveAuthzPluginException {
        // get the 'available privileges' from the file system
        RequiredPrivileges availPrivs = new RequiredPrivileges();
        // check file system permission
        FileSystem fs;
        try {
            fs = FileSystem.get(filePath.toUri(), conf); // modified: LOCAL_URI / DFS_URI handling
            FileStatus fileStatus = FileUtils.getPathOrParentThatExists(fs, filePath);
            // Modified: grant OWNER/INSERT/DELETE/SELECT unconditionally instead of
            // deriving them from file-system ownership and permissions; the original
            // checks are kept below for reference.
            /*if (FileUtils.isOwnerOfFileHierarchy(fs, fileStatus, userName)) {
                availPrivs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
            }*/
            availPrivs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
            availPrivs.addPrivilege(SQLPrivTypeGrant.INSERT_NOGRANT);
            availPrivs.addPrivilege(SQLPrivTypeGrant.DELETE_NOGRANT);
            availPrivs.addPrivilege(SQLPrivTypeGrant.SELECT_NOGRANT);
            /*if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.WRITE)) {
                availPrivs.addPrivilege(SQLPrivTypeGrant.INSERT_NOGRANT);
                availPrivs.addPrivilege(SQLPrivTypeGrant.DELETE_NOGRANT);
            }
            if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.READ)) {
                availPrivs.addPrivilege(SQLPrivTypeGrant.SELECT_NOGRANT);
            }*/
        } catch (Exception e) {
            String msg = "Error getting permissions for " + filePath + ": " + e.getMessage();
            throw new HiveAuthzPluginException(msg, e);
        }
        return availPrivs;
    }

    public static void assertNoDeniedPermissions(HivePrincipal hivePrincipal,
                                                 HiveOperationType hiveOpType, List<String> deniedMessages) throws HiveAccessControlException {
        if (deniedMessages.size() != 0) {
            Collections.sort(deniedMessages);
            String errorMessage = "Permission denied: " + hivePrincipal
                    + " does not have following privileges for operation " + hiveOpType + " "
                    + deniedMessages;
            throw new HiveAccessControlException(errorMessage);
        }
    }

    static HiveAuthzPluginException getPluginException(String prefix, Exception e) {
        return new HiveAuthzPluginException(prefix + ": " + e.getMessage(), e);
    }

    /**
     * Validate the principal type, and convert role name to lower case
     * @param hPrincipal
     * @return validated principal
     * @throws HiveAuthzPluginException
     */
    public static HivePrincipal getValidatedPrincipal(HivePrincipal hPrincipal)
            throws HiveAuthzPluginException {
        if (hPrincipal == null || hPrincipal.getType() == null) {
            // null principal
            return hPrincipal;
        }
        switch (hPrincipal.getType()) {
            case USER:
                return hPrincipal;
            case ROLE:
                // lower case role names, for case insensitive behavior
                return new HivePrincipal(hPrincipal.getName().toLowerCase(), hPrincipal.getType());
            default:
                throw new HiveAuthzPluginException("Invalid principal type in principal " + hPrincipal);
        }
    }

    /**
     * Calls getValidatedPrincipal on each principal in the list and updates the list
     * @param hivePrincipals
     * @return the validated principals
     * @throws HiveAuthzPluginException
     */
    public static List<HivePrincipal> getValidatedPrincipals(List<HivePrincipal> hivePrincipals)
            throws HiveAuthzPluginException {
        ListIterator<HivePrincipal> it = hivePrincipals.listIterator();
        while(it.hasNext()){
            it.set(getValidatedPrincipal(it.next()));
        }
        return hivePrincipals;
    }

    /**
     * Change the session context based on configuration to aid in testing of sql
     * std auth
     *
     * @param ctx
     * @param conf
     * @return
     */
    static HiveAuthzSessionContext applyTestSettings(HiveAuthzSessionContext ctx, HiveConf conf) {
        if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE)
                && ctx.getClientType() == CLIENT_TYPE.HIVECLI) {
            // create new session ctx object with HS2 as client type
            HiveAuthzSessionContext.Builder ctxBuilder = new HiveAuthzSessionContext.Builder(ctx);
            ctxBuilder.setClientType(CLIENT_TYPE.HIVESERVER2);
            return ctxBuilder.build();
        }
        return ctx;
    }

}


The sections marked with "modified" comments above are the changes for LOCAL_URI/DFS_URI and FUNCTION handling. Pretty simple, isn't it?
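
As a quick sanity check after deploying the plugin, statements like the following, which the stock SQL standard authorizer rejects for a non-admin user, should now pass. This is an illustrative sketch only; the table name, function name, and paths are placeholders:

-- run as a regular (non-admin) user connected through HiveServer2
LOAD DATA LOCAL INPATH '/tmp/data.csv' INTO TABLE some_table;  -- LOCAL_URI check now passes
CREATE FUNCTION my_lower AS 'com.example.udf.MyLower'
  USING JAR 'hdfs:///tmp/my-udfs.jar';                         -- FUNCTION (and DFS_URI) now pass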
