背景
连接HiveMetastore监听event
前提条件
通过密钥(keytab)认证Kerberos主体(Principal),不需要手动输入密码,但前提是密钥要与Kerberos主体相匹配。
本地有 core-site.xml、hive-site.xml 等 Hadoop/Hive 配置文件
直接上代码
package org.example;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
import org.apache.hadoop.security.UserGroupInformation;
/**
 * Demo: connect to a Kerberos-secured HiveMetastore and read notification events.
 *
 * <p>Authentication is keytab-based (no interactive password); the keytab must
 * match {@code keytabPrincipal}. hive-site.xml / core-site.xml are expected under
 * {@code hadoopConfDir} so client-side settings (notably hadoop.rpc.protection)
 * agree with the server — a mismatch yields
 * "SaslException: No common protection layer between client and server".
 */
@Slf4j
public class HiveClientDemo {

    private HiveMetaStoreClient hiveMetaStoreClient;

    /** Thrift URI of the HiveMetastore service. */
    public static String hiveMetastoreUris = "thrift://cdh-master:9083";
    /** Directory containing hive-site.xml and core-site.xml. */
    public static String hadoopConfDir = "/Users/yl/JavaProject/hiveApi/conf/";
    /** Kerberos client configuration (realms, KDCs, enctypes). */
    public static String krb5Conf = "/etc/krb5.conf";
    /** Keytab file for the principal below; must match it. */
    public static String keytab = "/Users/yl/JavaProject/hiveApi/conf/hive.keytab";
    /** Kerberos principal authenticated from the keytab. */
    public static String keytabPrincipal = "hive/cdh-master@LCC.COM";

    /**
     * Builds a HiveConf from the local config files, performs the Kerberos login
     * when thrift SASL is enabled, creates the metastore client and verifies
     * connectivity via {@link #ping()}.
     *
     * @throws Exception if the Kerberos login or client creation fails
     */
    void setHiveMetaStoreConf() throws Exception {
        HiveConf hiveConf = new HiveConf();
        // Load cluster-side settings so values that cannot be overridden via
        // setVar (e.g. hadoop.rpc.protection) match the server.
        hiveConf.addResource(new org.apache.hadoop.fs.Path(hadoopConfDir + "hive-site.xml"));
        hiveConf.addResource(new org.apache.hadoop.fs.Path(hadoopConfDir + "core-site.xml"));
        log.info("-------------------------------------------");
        log.info("DEFAULT_CONFIG: hadoop.rpc.protection -> {}", hiveConf.get("hadoop.rpc.protection"));
        if (hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS).isEmpty()) {
            hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, hiveMetastoreUris);
        }
        handleKerberos(hiveConf);
        try {
            this.hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
            ping();
        } catch (Exception e) {
            log.error("setHiveMetaStoreConf error", e);
            throw e;
        }
    }

    /**
     * Performs the keytab-based Kerberos login when the metastore requires SASL;
     * no-op when {@code hive.metastore.sasl.enabled} is false.
     *
     * @param hiveConf configuration handed to UserGroupInformation and later to the client
     * @throws Exception if the keytab login fails
     */
    private void handleKerberos(HiveConf hiveConf) throws Exception {
        // Point the JVM at the Kerberos client config; a wrong or missing file causes
        // "KrbException: no supported default etypes for default_tkt_enctypes".
        System.setProperty("java.security.krb5.conf", krb5Conf);
        // FIX: the original log line was mislabeled "hadoop.rpc.protection" while
        // actually printing the thrift-SASL flag.
        log.info("CONFIG: hive.metastore.sasl.enabled -> {}",
                hiveConf.getVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL));
        log.info("CONFIG: hive.server2.authentication -> {}",
                hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION));
        if (!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL)) {
            return;
        }
        // FIX: the original set these two properties on a throwaway Configuration
        // object and then passed hiveConf to UserGroupInformation, silently
        // discarding them. Set them on the configuration that is actually used.
        hiveConf.setBoolean("hadoop.security.authorization", true);
        hiveConf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(hiveConf);
        log.info("UserGroupInformation.loginUserFromKeytab keytabPrincipal -> {} keytab -> {}",
                keytabPrincipal, keytab);
        UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytab);
    }

    /**
     * Smoke-tests the connection: lists all databases and logs the current
     * notification event id.
     *
     * @return {@code true} when every metastore call succeeds
     * @throws Exception on any metastore RPC failure
     */
    private boolean ping() throws Exception {
        log.info("ping");
        log.info("show databases");
        for (String database : this.hiveMetaStoreClient.getAllDatabases()) {
            log.info(database);
        }
        CurrentNotificationEventId event = this.hiveMetaStoreClient.getCurrentNotificationEventId();
        log.info("CurrentNotificationEventId -> {}", event.getEventId());
        return true;
    }

    public static void main(String[] args) {
        try {
            HiveClientDemo client = new HiveClientDemo();
            // setHiveMetaStoreConf already pings once on success; the original's
            // second explicit ping() was redundant and has been removed.
            client.setHiveMetaStoreConf();
        } catch (Exception e) {
            log.error("error", e);
        }
    }
}
问题
- javax.security.sasl.SaslException: No common protection layer between client and server
HDFS读文件失败报错“No common protection layer”_MapReduce服务 MRS
客户端使用了默认配置,与服务端不一致;且该配置无法通过 setVar 的方式在代码里修改。
对于不能在代码中显式设置的配置(如 hadoop.rpc.protection),应配置环境变量 HADOOP_CONF_DIR(例如 HADOOP_CONF_DIR=/Users/yl/JavaProject/hiveApi/conf),让 jar 包自行读取该目录下的配置文件。
- KrbException: no supported default etypes for default_tkt_enctypes
配置 hive 后启动 trino 报同样的错误 KrbException: no supported default etypes for default_tkt_enctypes,需要检查 krb5.conf 配置:
- System.setProperty(“java.security.krb5.conf”,“/etc/krb5.conf”);
- -Djava.security.krb5.conf=./krb5.conf
- krb5.conf 配置