主要操作包含
- 客户端连接
- 目录
- 读取
- 创建
- 文件的创建
- 删除
- 上传
- 下载
- 部分下载、上传
jar包引入:
由于只用到了 HDFS,所以暂时不需要引入 MapReduce 和 YARN 相关的 jar 包
- \share\hadoop\common
- \share\hadoop\common\lib
- \share\hadoop\hdfs
- \share\hadoop\hdfs\lib
- \share\hadoop\tools\lib
以上目录的所有jar包
值得一提的是:如果是 Windows 开发环境,注意在进行 IO 操作时避免使用本地库(native library),否则可能因缺少 winutils/本地二进制而报错
public class FileOperate {

    /** Shared HDFS client; (re)connected before each test in {@link #initFileSystem()}. */
    public static DistributedFileSystem dfs = null;

    /** NameNode RPC endpoint the client connects to. */
    public static String nameNodeUri = "hdfs://hadoop01:9000";

    /**
     * Connects the HDFS client before each test and prints its working directory.
     *
     * @throws Exception if the URI is malformed or the NameNode is unreachable
     */
    @Before
    public void initFileSystem() throws Exception {
        System.out.println("初始化hadoop客户端");
        // Set the Hadoop login user; without this the local OS user name is sent,
        // and HDFS permission checks may reject the operations below.
        System.setProperty("HADOOP_USER_NAME", "root");
        dfs = new DistributedFileSystem();
        dfs.initialize(new URI(nameNodeUri), new Configuration());
        System.out.println("客户端连接成功");
        Path workingDirectory = dfs.getWorkingDirectory();
        System.out.println("工作目录:" + workingDirectory);
    }

    /**
     * Creates a directory tree on HDFS (behaves like {@code mkdir -p}).
     *
     * @throws Exception on RPC/IO failure
     */
    @Test
    public void testMkDir() throws Exception {
        boolean res = dfs.mkdirs(new Path("/test/aaa/bbb"));
        System.out.println("目录创建结果:" + (res ? "创建成功" : "创建失败"));
    }

    /**
     * Deletes a directory or file on HDFS.
     * FIX: the boolean result was previously discarded, hiding failures
     * (with recursive=false the call returns false for a non-empty directory).
     *
     * @throws Exception on RPC/IO failure
     */
    @Test
    public void testDeleteDir() throws Exception {
        // recursive = false: deleting a non-empty directory fails (returns false).
        boolean res = dfs.delete(new Path("/test/aaa/bbb"), false);
        System.out.println("目录删除结果:" + (res ? "删除成功" : "删除失败"));
    }

    /**
     * Recursively lists all files (directories are skipped by listFiles)
     * under the root path and prints their metadata.
     *
     * @throws Exception on RPC/IO failure
     */
    @Test
    public void testFileList() throws Exception {
        // recursive = true: descend into subdirectories.
        RemoteIterator<LocatedFileStatus> listFiles = dfs.listFiles(new Path("/"), true);
        // Local instance, so SimpleDateFormat's lack of thread-safety is not an issue here.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        while (listFiles.hasNext()) {
            // No cast needed: the iterator is already typed as LocatedFileStatus.
            LocatedFileStatus fileStatus = listFiles.next();
            // Permission bits (rwx...)
            FsPermission permission = fileStatus.getPermission();
            // Owning user
            String owner = fileStatus.getOwner();
            // Owning group
            String group = fileStatus.getGroup();
            // File length in bytes
            long len = fileStatus.getLen();
            // Last modification time (epoch millis)
            long modificationTime = fileStatus.getModificationTime();
            Path path = fileStatus.getPath();
            System.out.println("-------------------------------");
            System.out.println("permission:" + permission);
            System.out.println("owner:" + owner);
            System.out.println("group:" + group);
            System.out.println("len:" + len);
            System.out.println("modificationTime:" + sdf.format(new Date(modificationTime)));
            System.out.println("path:" + path);
        }
    }

    /**
     * Uploads a whole classpath resource to HDFS via raw streams.
     * FIX: both streams are now closed via try-with-resources — an HDFS
     * output stream only finalizes the file on close(), so leaking it could
     * leave the upload incomplete.
     * Note: on Windows, apache-commons {@code IOUtils.copy} may have issues
     * with Hadoop's native local IO (see header notes).
     *
     * @throws Exception on IO failure; NPE if the resource "uploadFile.txt"
     *                   is missing from the classpath — TODO confirm it ships with the tests
     */
    @Test
    public void testUploadFullFile() throws Exception {
        try (FSDataOutputStream out = dfs.create(new Path("/test/aaa/uploadFile.txt"), true);
             FileInputStream in = new FileInputStream(
                     FileOperate.class.getResource("uploadFile.txt").getFile())) {
            org.apache.commons.io.IOUtils.copy(in, out);
        }
        System.out.println("上传完毕");
    }

    /**
     * Uploads a whole file using the higher-level copyFromLocalFile API,
     * which manages the streams internally (nothing to close here).
     *
     * @throws Exception on IO failure; NPE if the resource is missing
     */
    @Test
    public void testUploadFullFile2() throws Exception {
        dfs.copyFromLocalFile(
                new Path(FileOperate.class.getResource("uploadFile.txt").getFile()),
                new Path("/test/aaa/uploadFullFile.txt"));
    }

    /**
     * Partial upload: copies 12 bytes starting at offset 6 of the local
     * resource into a new HDFS file.
     * FIX: streams are now closed via try-with-resources (previously leaked).
     *
     * @throws Exception on IO failure; NPE if the resource is missing
     */
    @Test
    public void testUploadFile2() throws Exception {
        try (FSDataOutputStream out = dfs.create(new Path("/test/aaa/uploadFile2.txt"), true);
             FileInputStream in = new FileInputStream(
                     FileOperate.class.getResource("uploadFile.txt").getFile())) {
            // copyLarge(in, out, inputOffset, length): skip 6 bytes, copy 12.
            org.apache.commons.io.IOUtils.copyLarge(in, out, 6, 12);
        }
        System.out.println("上传完毕");
    }

    /**
     * Full download via the high-level API.
     * useRawLocalFileSystem = true performs plain Java IO on the local side,
     * avoiding Hadoop's native libraries (needed on Windows dev machines).
     *
     * @throws Exception on IO failure
     */
    @Test
    public void testDownloadFile() throws Exception {
        // delSrc = false: keep the HDFS copy; last arg enables raw local FS.
        dfs.copyToLocalFile(false,
                new Path("/test/aaa/uploadFullFile.txt"),
                new Path("E:/Workspaces/MyEclipse2014_BigData/hadoop-demo/src/com/xbz/bigdata/hadoop/demo"),
                true);
        System.out.println("下载完成");
    }

    /**
     * Partial download: seeks to byte offset 6 in the HDFS file and copies
     * the remainder to a local file.
     * FIX: both streams are now closed via try-with-resources (previously leaked).
     *
     * @throws Exception on IO failure
     */
    @Test
    public void testDownloadFile2() throws Exception {
        try (FSDataInputStream src = dfs.open(new Path("/test/aaa/uploadFullFile.txt"));
             FileOutputStream des = new FileOutputStream(new File(
                     "E:/Workspaces/MyEclipse2014_BigData/hadoop-demo/src/com/xbz/bigdata/hadoop/demo",
                     "download_uploadFullFile.txt"))) {
            // Start reading from byte 6 of the HDFS file.
            src.seek(6);
            org.apache.commons.io.IOUtils.copy(src, des);
        }
        System.out.println("下载完成");
    }
}