// Scenario: from a local IDE, connect to a Kerberos-secured HDFS cluster, create a directory, and upload a local file.
// Example:
package com.cntaiping.tpi.ods.pinko.utils
import com.cntaiping.tpi.ods.security.InitKafkaUtil
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.hdfs.DistributedFileSystem
import org.apache.hadoop.security.UserGroupInformation
import java.net.URI
/**
 * Smoke test for connecting to a Kerberos-secured HDFS cluster from a local
 * IDE: checks an existing directory, creates a new one, and uploads a file.
 */
object TestHdfsUtil {

  /** Kerberos principal (short name) used to log in. */
  private val UserKey = "test"
  /** Keytab file for the principal — single source of truth, used for both
    * the system property and the keytab login. */
  private val KeyTabPath = "out/artifacts/kafka2hdfs_test_jar/user.keytab"
  /** Kerberos client configuration file. */
  private val Krb5ConfPath = "out/artifacts/kafka2hdfs_test_jar/krb5.conf"
  /** NameNode endpoint of the test cluster. */
  private val DefaultHdfsUri = "hdfs://10.22.33.44:25000"

  def main(args: Array[String]): Unit = {
    val fileSystem = getHdfs()
    try {
      // Surface the boolean results instead of discarding them, so a failed
      // step is visible in the console rather than silently ignored.
      println(s"/tmp exists: ${fileSystem.exists(new Path("/tmp"))}")
      println(s"mkdirs /tmp/test: ${fileSystem.mkdirs(new Path("/tmp/test"))}")
      fileSystem.copyFromLocalFile(new Path("src/main/resources/input/test.txt"), new Path("/tmp/test"))
    } finally {
      // newInstance() returns a non-cached FileSystem: the caller owns it and
      // must close it, otherwise the client connection leaks.
      fileSystem.close()
    }
  }

  /**
   * Builds a Kerberos-authenticated [[FileSystem]] handle.
   *
   * @param hdfsUri NameNode URI to connect to; defaults to the test cluster,
   *                so existing `getHdfs()` call sites keep working.
   * @return a fresh (non-cached) [[FileSystem]] — the caller must close it.
   */
  def getHdfs(hdfsUri: String = DefaultHdfsUri): FileSystem = {
    val configuration = new Configuration()
    // Point the JVM at the keytab and krb5.conf used for authentication.
    System.setProperty("user.keytab", KeyTabPath)
    System.setProperty("java.security.krb5.conf", Krb5ConfPath)
    configuration.set("hadoop.security.authentication", "kerberos")
    configuration.set("hadoop.security.authorization", "true")
    // Perform the keytab login before any FileSystem call is made.
    UserGroupInformation.setConfiguration(configuration)
    UserGroupInformation.loginUserFromKeytab(UserKey, KeyTabPath)
    configuration.set("fs.hdfs.impl", classOf[DistributedFileSystem].getName)
    FileSystem.newInstance(URI.create(hdfsUri), configuration)
  }
}