Scala本地连接带有kerberos的Hive,网上找了很多但是自己实际用起来还是连不上,所以贴上可用的版本。
XXX.keytab,krb5.conf,truststore这三个文件要在服务器上下载到本地,放在项目resources目录下的Kerberos目录下面。
代码中的confPath路径就是实际本地路径,例如项目resources目录下的Kerberos目录:
"C:\\Users\\XXX\\Project\\MY_Projects\\test3\\src\\main\\resources\\Kerberos\\"
def initSpark(): SparkConf = {
  // Build a SparkConf; on a Windows developer machine additionally wire up the
  // local Kerberos artifacts (krb5.conf / truststore / keytab) so a local
  // Spark session can authenticate against the Kerberized cluster.
  // Returns: the (possibly Kerberos-configured) SparkConf.
  val isWin = System.getProperty("os.name").toLowerCase().startsWith("windows")
  println(s"current env isWin = $isWin")
  val sparkConf = new SparkConf()
  if (isWin) {
    // Resolve the Kerberos resource directory from the classpath
    // (e.g. src/main/resources/Kerberos/). NOTE(review): getPath may contain
    // URL-encoded characters (e.g. %20) if the project path has spaces — verify.
    val confPath = this.getClass.getClassLoader.getResource("Kerberos/").getPath
    println(s"confPath: $confPath")
    // JVM-wide Kerberos / TLS settings read by the security subsystem.
    System.setProperty("java.security.krb5.conf", confPath + "krb5.conf")
    System.setProperty("javax.net.ssl.trustStore", confPath + "truststore")
    System.setProperty("javax.net.ssl.trustStorePassword", "changeit")
    System.setProperty("javax.security.auth.useSubjectCredsOnly", "false")
    System.setProperty("sun.security.krb5.debug", "true")
    val conf = new Configuration
    // Required: without switching Hadoop from its default SIMPLE authentication
    // to Kerberos, loginUserFromKeytab below is a no-op and no ticket is obtained.
    conf.set("hadoop.security.authentication", "kerberos")
    UserGroupInformation.setConfiguration(conf)
    UserGroupInformation.loginUserFromKeytab("XXX@XXX.NET", confPath + "XXX.keytab")
    // Also hand the keytab/principal to Spark so executors can re-login.
    sparkConf
      .set("spark.yarn.keytab", confPath + "XXX.keytab")
      .set("spark.yarn.principal", "XXX@XXX.NET")
    println(UserGroupInformation.getCurrentUser)
  }
  // Last expression is the method result — no explicit `return` needed.
  sparkConf
}
调用代码如下:
def main(args: Array[String]): Unit = {
  // Silence Spark's verbose internal logging; keep warnings and errors.
  Logger.getLogger("org").setLevel(Level.WARN)

  // Create a local, Hive-enabled session configured by the Kerberos-aware
  // SparkConf built in initSpark().
  val kerberosConf = InitSpark().initSpark()
  val spark = SparkSession
    .builder()
    .appName("test1")
    .master("local[*]")
    .config(kerberosConf)
    .enableHiveSupport()
    .getOrCreate()

  // Smoke-test the connection with a simple query, then shut down.
  spark.sql("select * from table ").show()
  spark.sparkContext.stop()
}