package cn.itning.test
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.junit.After
import org.junit.Before
import org.junit.Test
import java.io.FileInputStream
import java.io.FileOutputStream
import java.net.URI
/**
 * CRUD smoke tests for the Hadoop HDFS client API.
 *
 * Each test acquires a fresh [FileSystem] client in [getFileSystem] and
 * releases it in [closeFileSystem]. Host, port, and local paths are
 * hard-coded for a specific dev environment.
 */
class Test {
    private lateinit var fs: FileSystem

    /*
     * In a real project, place xxx-site.xml on the classpath; the
     * Configuration constructor loads it automatically.
     */
    @Before
    fun getFileSystem() {
        val configuration = Configuration()
        configuration.set("fs.defaultFS", "hdfs://192.168.84.132:9000/")// use HDFS as the file system
        configuration.set("dfs.replication", "1")// replica count
        // fs = FileSystem.get(configuration)// the concrete client implementation is chosen from the configuration
        fs = FileSystem.get(URI("hdfs://192.168.84.132:9000/"), configuration, "hadoop")
    }

    /*
     * Close the client after every test. The original code closed fs inside
     * testUpload only, which is inconsistent and can break other tests
     * sharing a cached FileSystem instance in the same JVM.
     */
    @After
    fun closeFileSystem() {
        fs.close()
    }

    /* Upload test: stream a local file into HDFS. */
    @Test
    fun testHdfsClientUp() {
        val path = Path("hdfs://192.168.84.132:9000/tests.dat")// destination in HDFS
        // use {} closes both streams even on failure (the original leaked them)
        fs.create(path).use { output ->
            FileInputStream("C://Users//wangn//Desktop//1011.rar").use { input ->// source file
                IOUtils.copy(input, output)
            }
        }
    }

    /* Download test: stream an HDFS file to the local disk. */
    @Test
    fun testHdfsClientDown() {
        val path = Path("hdfs://192.168.84.132:9000/tests.dat")// source file in HDFS
        fs.open(path).use { input ->
            FileOutputStream("C://Users//wangn//Desktop//aaaa.rar").use { output ->// output location
                IOUtils.copy(input, output)
            }
        }
    }

    /* Create */
    @Test
    fun testUpload() {// upload a file via the convenience API
        fs.copyFromLocalFile(Path("C://Users//wangn//Desktop//aa.dat"), Path("/test"))
    }

    @Test
    fun testMkdir() {// create a directory
        fs.mkdirs(Path("/test"))
    }

    /* Delete */
    @Test
    fun testRemove() {
        // recursive = true: deleting a directory also deletes its contents
        val delete = fs.delete(Path("/tests.dat"), true)
        println("true if delete is successful else false.---$delete")
    }

    /* Update */
    @Test
    fun testRename() {// rename a file
        fs.rename(Path("/test/1011.rar"), Path("/test/a.rar"))
    }

    /* Read */
    @Test
    fun testListFile() {// list files recursively
        val listFiles = fs.listFiles(Path("/"), true)
        while (listFiles.hasNext()) {
            val locatedFileStatus = listFiles.next()
            println(locatedFileStatus)
            println(locatedFileStatus.path.name)
        }
    }

    @Test
    fun testListFolder() {// list files and directories (non-recursive)
        // loop variable renamed from "fs" — the original shadowed the class property
        for (status in fs.listStatus(Path("/"))) {
            println(status.path.name)
        }
    }
}
// Hadoop client CRUD
// Latest recommended article published 2024-12-24 14:50:37
