通过 keytab 方式访问 HDFS（Accessing HDFS with a Kerberos keytab）
// Write `inputStream` to an HDFS file, logging in from a Kerberos keytab first
// when Kerberos is enabled for the cluster.
Configuration config = new Configuration();
FileSystem hdfs = null;
// NOTE(review): the catch/finally for this try is outside this excerpt — confirm
// `hdfs` is closed there so the FileSystem handle is not leaked on failure.
try {
if (isKerberosEnable){
// The JVM-wide krb5.conf location must be set before any Kerberos login attempt.
System.setProperty("java.security.krb5.conf", "./config/kerberos/krb5.conf");
config.set("hadoop.security.authentication","kerberos");
// NOTE(review): Configuration.addResource(String) resolves the name on the
// CLASSPATH, not the local file system — a relative "./config/..." path is
// unlikely to be found this way. Consider addResource(new Path(...)); verify
// against the deployment layout before changing.
config.addResource("./config/cluster/hadoop/core-site.xml");
config.addResource("./config/cluster/hadoop/hdfs-site.xml");
UserGroupInformation.setConfiguration(config);
// Log this process in from the keytab; the login is cached process-wide.
UserGroupInformation.loginUserFromKeytab(kerberosLoginUser, "./config/kerberos/hdfs.keytab");
// NOTE(review): this local is never used afterwards — safe to drop.
UserGroupInformation userGroupInformation = UserGroupInformation.getLoginUser();
}
hdfs = FileSystem.get(URI.create(hdfsUri), config);
// create(..., true) overwrites the target file if it already exists.
FSDataOutputStream outputStream = hdfs.create(new Path(hdfsFile), true);
// copyBytes(..., 4096, true): 4 KiB buffer; `true` closes both streams when done.
IOUtils.copyBytes(inputStream, outputStream, 4096, true);
通过 keytab 方式访问 HBase（Accessing HBase with a Kerberos keytab）
/**
 * Prepares `conf` for Kerberos-authenticated HBase access and performs the
 * keytab login.
 *
 * Sets the ZooKeeper connection details, switches Hadoop/HBase authentication
 * to Kerberos, registers the configured principals (Spark config overrides
 * falling back to `Contants` defaults), and finally logs this process in via
 * `UserGroupInformation` using the keytab file.
 *
 * @param conf      HBase/Hadoop configuration, mutated in place
 * @param sparkConf Spark configuration supplying principal/user overrides
 */
def setKerberosConf(conf: Configuration, sparkConf: SparkConf): Unit = {
  // ZooKeeper endpoint the HBase client connects through.
  conf.set("hbase.zookeeper.quorum", s"${zkaddress}")
  conf.setInt("hbase.zookeeper.property.clientPort", 2181)
  conf.set("zookeeper.znode.parent", "/hbase")

  // The JVM-wide krb5.conf must be in place before UGI touches Kerberos.
  System.setProperty("java.security.krb5.conf", "conf/kerberos/krb5.conf")

  // Kerberos switches and principals, applied in one pass.
  val kerberosSettings = Seq(
    "hadoop.security.authentication" -> "kerberos",
    "hbase.security.authentication" -> "kerberos",
    "keytab.file" -> Contants.HBASE_KERBEROS_KEYTAB_FILE,
    "kerberos.principal" ->
      sparkConf.get("hbase.kerberos.principal", Contants.HBASE_KERBEROS_PRINCIPAL),
    "hbase.master.kerberos.principal" ->
      sparkConf.get("hbase.master.kerberos.principal", Contants.HBASE_MASTER_KERBEROS_PRINCIPAL),
    "hbase.regionserver.kerberos.principal" ->
      sparkConf.get("hbase.regionserver.kerberos.principal", Contants.HBASE_REGIONSERVER_KERBEROS_PRINCIPAL)
  )
  kerberosSettings.foreach { case (key, value) => conf.set(key, value) }

  // Authenticate from the keytab; subsequent HBase calls reuse the cached login.
  UserGroupInformation.setConfiguration(conf)
  UserGroupInformation.loginUserFromKeytab(
    sparkConf.get("hbase.kerberos.user", Contants.HBASE_KERBEROS_USER),
    Contants.HBASE_KERBEROS_KEYTAB_FILE)
}