// Commonly used HDFS APIs:
package com.jxd.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
/**
 * Demonstrates common HDFS client operations: creating directories, uploading
 * and downloading files, deleting, renaming/moving, listing file details
 * (including block locations), and distinguishing files from directories.
 *
 * <p>Each test obtains a fresh {@link FileSystem} handle in {@link #init()}
 * and releases it in {@link #close()}.
 */
public class HdfsClient {

    private FileSystem fs;

    /**
     * Connects to the NameNode at hadoop102:8020 as user "jxd" before each test.
     *
     * @throws URISyntaxException   if the NameNode URI is malformed
     * @throws IOException          if the connection cannot be established
     * @throws InterruptedException if the connecting thread is interrupted
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        URI uri = new URI("hdfs://hadoop102:8020");
        Configuration configuration = new Configuration();
        String user = "jxd";
        fs = FileSystem.get(uri, configuration, user);
    }

    /**
     * Releases the file system handle after each test.
     *
     * @throws IOException if closing the handle fails
     */
    @After
    public void close() throws IOException {
        // Guard against NPE when init() failed before assigning fs.
        if (fs != null) {
            fs.close();
        }
    }

    /**
     * Creates a directory (including any missing parents) on HDFS.
     *
     * @throws IOException if the directory cannot be created
     */
    @Test
    public void testMkdir() throws IOException {
        fs.mkdirs(new Path("/xiyou/huaguoshan"));
    }

    /**
     * Uploads a local file to HDFS.
     *
     * <p>Configuration precedence (lowest to highest):
     * hdfs-default.xml &lt; hdfs-site.xml &lt; config file under resources
     * &lt; settings on the Configuration object.
     *
     * @throws IOException if the upload fails
     */
    @Test
    public void testPut() throws IOException {
        // Args: delete source? | overwrite existing? | local source path | HDFS destination
        fs.copyFromLocalFile(false, false, new Path("G:\\input\\hello.txt"), new Path("hdfs://hadoop102/xiyou/huaguoshan"));
    }

    /**
     * Downloads a file from HDFS to the local file system.
     *
     * @throws IOException if the download fails
     */
    @Test
    public void testGet() throws IOException {
        // Args: delete HDFS source? | HDFS source path | local destination | skip local CRC check?
        fs.copyToLocalFile(false, new Path("hdfs://hadoop102/xiyou/huaguoshan"), new Path("G:\\"), false);
    }

    /**
     * Deletes a path on HDFS.
     *
     * @throws IOException if the deletion fails
     */
    @Test
    public void testRm() throws IOException {
        // Args: target path | recursive delete?
        fs.delete(new Path("hdfs://hadoop102/sanguo"), true);
    }

    /**
     * Renames a file in place; with a different parent directory the same call
     * moves the file.
     *
     * @throws IOException if the rename/move fails
     */
    @Test
    public void renameAndMove() throws IOException {
        // Args: source path | destination path
        fs.rename(new Path("hdfs://hadoop102/input/word.txt"), new Path("hdfs://hadoop102/input/word1.txt"));
    }

    /**
     * Recursively lists all files under the root and prints their metadata,
     * including permissions, ownership, size, replication, and block locations.
     *
     * @throws IOException if listing fails
     */
    @Test
    public void testLookFileDetail() throws IOException {
        // Recursive listing: directories are traversed, only files are returned.
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("hdfs://hadoop102/"), true);
        while (files.hasNext()) {
            LocatedFileStatus fileStatus = files.next();
            System.out.println("Path: " + fileStatus.getPath());
            System.out.println("Permission: " + fileStatus.getPermission());
            System.out.println("Owner: " + fileStatus.getOwner());
            System.out.println("Group: " + fileStatus.getGroup());
            System.out.println("FileSize: " + fileStatus.getLen());
            System.out.println("ModificationTime: " + fileStatus.getModificationTime());
            System.out.println("Replication: " + fileStatus.getReplication());
            System.out.println("BlockSize: " + fileStatus.getBlockSize());
            System.out.println("Name: " + fileStatus.getPath().getName());
            // Block locations: which DataNodes hold each block of this file.
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            System.out.println("BlockLocation: " + Arrays.toString(blockLocations));
        }
    }

    /**
     * Lists the immediate children of the root and reports whether each entry
     * is a file or a directory.
     *
     * @throws IOException if listing fails
     */
    @Test
    public void testIsFileOrDir() throws IOException {
        // Non-recursive listing of direct children only.
        FileStatus[] listStatus = fs.listStatus(new Path("hdfs://hadoop102/"));
        for (FileStatus status : listStatus) {
            if (status.isFile()) {
                System.out.println("文件:" + status.getPath().getName());
            } else {
                System.out.println("目录:" + status.getPath().getName());
            }
        }
    }
}